23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 492 #include <vulkan/vulkan.h> 494 VK_DEFINE_HANDLE(VmaAllocator)
498 VmaAllocator allocator,
500 VkDeviceMemory memory,
504 VmaAllocator allocator,
506 VkDeviceMemory memory,
658 VmaAllocator* pAllocator);
662 VmaAllocator allocator);
669 VmaAllocator allocator,
670 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
677 VmaAllocator allocator,
678 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
687 VmaAllocator allocator,
688 uint32_t memoryTypeIndex,
689 VkMemoryPropertyFlags* pFlags);
700 VmaAllocator allocator,
701 uint32_t frameIndex);
731 VmaAllocator allocator,
734 #define VMA_STATS_STRING_ENABLED 1 736 #if VMA_STATS_STRING_ENABLED 742 VmaAllocator allocator,
743 char** ppStatsString,
744 VkBool32 detailedMap);
747 VmaAllocator allocator,
750 #endif // #if VMA_STATS_STRING_ENABLED 752 VK_DEFINE_HANDLE(VmaPool)
881 VmaAllocator allocator,
882 uint32_t memoryTypeBits,
884 uint32_t* pMemoryTypeIndex);
994 VmaAllocator allocator,
1001 VmaAllocator allocator,
1011 VmaAllocator allocator,
1022 VmaAllocator allocator,
1024 size_t* pLostAllocationCount);
1026 VK_DEFINE_HANDLE(VmaAllocation)
1079 VmaAllocator allocator,
1080 const VkMemoryRequirements* pVkMemoryRequirements,
1082 VmaAllocation* pAllocation,
1092 VmaAllocator allocator,
1095 VmaAllocation* pAllocation,
1100 VmaAllocator allocator,
1103 VmaAllocation* pAllocation,
1108 VmaAllocator allocator,
1109 VmaAllocation allocation);
1113 VmaAllocator allocator,
1114 VmaAllocation allocation,
1119 VmaAllocator allocator,
1120 VmaAllocation allocation,
1134 VmaAllocator allocator,
1135 VmaAllocation* pAllocation);
1146 VmaAllocator allocator,
1147 VmaAllocation allocation,
1151 VmaAllocator allocator,
1152 VmaAllocation allocation);
1285 VmaAllocator allocator,
1286 VmaAllocation* pAllocations,
1287 size_t allocationCount,
1288 VkBool32* pAllocationsChanged,
1319 VmaAllocator allocator,
1320 const VkBufferCreateInfo* pBufferCreateInfo,
1323 VmaAllocation* pAllocation,
1338 VmaAllocator allocator,
1340 VmaAllocation allocation);
1344 VmaAllocator allocator,
1345 const VkImageCreateInfo* pImageCreateInfo,
1348 VmaAllocation* pAllocation,
1363 VmaAllocator allocator,
1365 VmaAllocation allocation);
1371 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1374 #ifdef __INTELLISENSE__ 1375 #define VMA_IMPLEMENTATION 1378 #ifdef VMA_IMPLEMENTATION 1379 #undef VMA_IMPLEMENTATION 1401 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1402 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1414 #if VMA_USE_STL_CONTAINERS 1415 #define VMA_USE_STL_VECTOR 1 1416 #define VMA_USE_STL_UNORDERED_MAP 1 1417 #define VMA_USE_STL_LIST 1 1420 #if VMA_USE_STL_VECTOR 1424 #if VMA_USE_STL_UNORDERED_MAP 1425 #include <unordered_map> 1428 #if VMA_USE_STL_LIST 1437 #include <algorithm> 1441 #if !defined(_WIN32) 1448 #define VMA_ASSERT(expr) assert(expr) 1450 #define VMA_ASSERT(expr) 1456 #ifndef VMA_HEAVY_ASSERT 1458 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1460 #define VMA_HEAVY_ASSERT(expr) 1466 #define VMA_NULL nullptr 1469 #ifndef VMA_ALIGN_OF 1470 #define VMA_ALIGN_OF(type) (__alignof(type)) 1473 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1475 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1477 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1481 #ifndef VMA_SYSTEM_FREE 1483 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1485 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1490 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1494 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1498 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1502 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1505 #ifndef VMA_DEBUG_LOG 1506 #define VMA_DEBUG_LOG(format, ...) 1516 #if VMA_STATS_STRING_ENABLED 1517 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1519 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats `num` as unsigned decimal text into outStr.
// At most strLen bytes are written; snprintf guarantees NUL termination when strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats pointer `ptr` as implementation-defined "%p" text into outStr.
// At most strLen bytes are written; snprintf guarantees NUL termination when strLen > 0.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    const void* const address = ptr;
    snprintf(outStr, strLen, "%p", address);
}
1537 void Lock() { m_Mutex.lock(); }
1538 void Unlock() { m_Mutex.unlock(); }
1542 #define VMA_MUTEX VmaMutex 1553 #ifndef VMA_ATOMIC_UINT32 1554 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1557 #ifndef VMA_BEST_FIT 1570 #define VMA_BEST_FIT (1) 1573 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1578 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1581 #ifndef VMA_DEBUG_ALIGNMENT 1586 #define VMA_DEBUG_ALIGNMENT (1) 1589 #ifndef VMA_DEBUG_MARGIN 1594 #define VMA_DEBUG_MARGIN (0) 1597 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1602 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1605 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1610 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1613 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1614 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1618 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1619 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1623 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1624 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1628 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1634 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1635 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v`.
// Branch-free SWAR popcount: each step sums bit counts in progressively wider fields.
// NOTE(review): the extracted block was missing the final `return c;` (UB in a
// value-returning function) — restored here.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);           // 2-bit field sums
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);     // 4-bit field sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                    // 8-bit field sums
    c = ((c >> 8) + c) & 0x00FF00FF;                    // 16-bit field sums
    c = ((c >> 16) + c) & 0x0000FFFF;                   // final 32-bit sum
    return c;
}
// Rounds `val` up to the nearest multiple of `align`.
// `align` must be nonzero; works for any positive `align`, not only powers of 2,
// because it uses integer division rather than bit masking.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T blockCount = (val + align - 1) / align;
    return blockCount * align;
}
// Integer division of x by y, rounding to nearest (ties round up for
// unsigned/positive operands, since half of y is added before dividing).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
1665 template<
typename Iterator,
typename Compare>
1666 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
1668 Iterator centerValue = end; --centerValue;
1669 Iterator insertIndex = beg;
1670 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
1672 if(cmp(*memTypeIndex, *centerValue))
1674 if(insertIndex != memTypeIndex)
1676 VMA_SWAP(*memTypeIndex, *insertIndex);
1681 if(insertIndex != centerValue)
1683 VMA_SWAP(*insertIndex, *centerValue);
1688 template<
typename Iterator,
typename Compare>
1689 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1693 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1694 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1695 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1699 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1701 #endif // #ifndef VMA_SORT 1710 static inline bool VmaBlocksOnSamePage(
1711 VkDeviceSize resourceAOffset,
1712 VkDeviceSize resourceASize,
1713 VkDeviceSize resourceBOffset,
1714 VkDeviceSize pageSize)
1716 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1717 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1718 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1719 VkDeviceSize resourceBStart = resourceBOffset;
1720 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1721 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block.
// Ordered so that the granularity-conflict check below can normalize a pair of
// types by sorting them.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

// Returns true if two adjacent suballocations of the given types must be kept
// on separate memory pages due to the bufferImageGranularity limit.
// The relation is symmetric, so the pair is normalized to type1 <= type2 first.
// UNKNOWN is treated conservatively (conflicts with everything non-free);
// linear resources (buffers, linear images) conflict with optimal-tiling images.
// NOTE(review): the extracted block had lost the per-case `return` statements
// and the default case — reconstructed here.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        // Normalize order with a plain temp swap.
        VmaSuballocationType tmp = suballocType1;
        suballocType1 = suballocType2;
        suballocType2 = tmp;
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        // Unexpected enum value: be conservative and report a conflict.
        return true;
    }
}
1780 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1781 m_pMutex(useMutex ? &mutex : VMA_NULL)
1798 VMA_MUTEX* m_pMutex;
1801 #if VMA_DEBUG_GLOBAL_MUTEX 1802 static VMA_MUTEX gDebugGlobalMutex;
1803 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1805 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1809 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the first
// element that is NOT less than `key` (i.e. lower_bound), or `end` if all
// elements compare less. `cmp(element, key)` must implement element < key.
// NOTE(review): the loop shell (`while(down < up)`), both branch updates and
// the final `return beg + down;` were lost in extraction — reconstructed here.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;   // mid still less than key: search upper half
        }
        else
        {
            up = mid;         // mid not less: candidate, search lower half
        }
    }
    return beg + down;
}
1842 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1844 if((pAllocationCallbacks != VMA_NULL) &&
1845 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1847 return (*pAllocationCallbacks->pfnAllocation)(
1848 pAllocationCallbacks->pUserData,
1851 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1855 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1859 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1861 if((pAllocationCallbacks != VMA_NULL) &&
1862 (pAllocationCallbacks->pfnFree != VMA_NULL))
1864 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1868 VMA_SYSTEM_FREE(ptr);
1872 template<
typename T>
1873 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1875 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1878 template<
typename T>
1879 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1881 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1884 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1886 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1888 template<
typename T>
1889 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1892 VmaFree(pAllocationCallbacks, ptr);
1895 template<
typename T>
1896 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1900 for(
size_t i = count; i--; )
1904 VmaFree(pAllocationCallbacks, ptr);
1909 template<
typename T>
1910 class VmaStlAllocator
1913 const VkAllocationCallbacks*
const m_pCallbacks;
1914 typedef T value_type;
1916 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1917 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1919 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1920 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1922 template<
typename U>
1923 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1925 return m_pCallbacks == rhs.m_pCallbacks;
1927 template<
typename U>
1928 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1930 return m_pCallbacks != rhs.m_pCallbacks;
1933 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1936 #if VMA_USE_STL_VECTOR 1938 #define VmaVector std::vector 1940 template<
typename T,
typename allocatorT>
1941 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1943 vec.insert(vec.begin() + index, item);
1946 template<
typename T,
typename allocatorT>
1947 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1949 vec.erase(vec.begin() + index);
1952 #else // #if VMA_USE_STL_VECTOR 1957 template<
typename T,
typename AllocatorT>
1961 typedef T value_type;
1963 VmaVector(
const AllocatorT& allocator) :
1964 m_Allocator(allocator),
1971 VmaVector(
size_t count,
const AllocatorT& allocator) :
1972 m_Allocator(allocator),
1973 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1979 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1980 m_Allocator(src.m_Allocator),
1981 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1982 m_Count(src.m_Count),
1983 m_Capacity(src.m_Count)
1987 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1993 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1996 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2000 resize(rhs.m_Count);
2003 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2009 bool empty()
const {
return m_Count == 0; }
2010 size_t size()
const {
return m_Count; }
2011 T* data() {
return m_pArray; }
2012 const T* data()
const {
return m_pArray; }
2014 T& operator[](
size_t index)
2016 VMA_HEAVY_ASSERT(index < m_Count);
2017 return m_pArray[index];
2019 const T& operator[](
size_t index)
const 2021 VMA_HEAVY_ASSERT(index < m_Count);
2022 return m_pArray[index];
2027 VMA_HEAVY_ASSERT(m_Count > 0);
2030 const T& front()
const 2032 VMA_HEAVY_ASSERT(m_Count > 0);
2037 VMA_HEAVY_ASSERT(m_Count > 0);
2038 return m_pArray[m_Count - 1];
2040 const T& back()
const 2042 VMA_HEAVY_ASSERT(m_Count > 0);
2043 return m_pArray[m_Count - 1];
2046 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2048 newCapacity = VMA_MAX(newCapacity, m_Count);
2050 if((newCapacity < m_Capacity) && !freeMemory)
2052 newCapacity = m_Capacity;
2055 if(newCapacity != m_Capacity)
2057 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2060 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2062 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2063 m_Capacity = newCapacity;
2064 m_pArray = newArray;
2068 void resize(
size_t newCount,
bool freeMemory =
false)
2070 size_t newCapacity = m_Capacity;
2071 if(newCount > m_Capacity)
2073 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2077 newCapacity = newCount;
2080 if(newCapacity != m_Capacity)
2082 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2083 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2084 if(elementsToCopy != 0)
2086 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2088 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2089 m_Capacity = newCapacity;
2090 m_pArray = newArray;
2096 void clear(
bool freeMemory =
false)
2098 resize(0, freeMemory);
2101 void insert(
size_t index,
const T& src)
2103 VMA_HEAVY_ASSERT(index <= m_Count);
2104 const size_t oldCount = size();
2105 resize(oldCount + 1);
2106 if(index < oldCount)
2108 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2110 m_pArray[index] = src;
2113 void remove(
size_t index)
2115 VMA_HEAVY_ASSERT(index < m_Count);
2116 const size_t oldCount = size();
2117 if(index < oldCount - 1)
2119 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2121 resize(oldCount - 1);
2124 void push_back(
const T& src)
2126 const size_t newIndex = size();
2127 resize(newIndex + 1);
2128 m_pArray[newIndex] = src;
2133 VMA_HEAVY_ASSERT(m_Count > 0);
2137 void push_front(
const T& src)
2144 VMA_HEAVY_ASSERT(m_Count > 0);
2148 typedef T* iterator;
2150 iterator begin() {
return m_pArray; }
2151 iterator end() {
return m_pArray + m_Count; }
2154 AllocatorT m_Allocator;
2160 template<
typename T,
typename allocatorT>
2161 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2163 vec.insert(index, item);
2166 template<
typename T,
typename allocatorT>
2167 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2172 #endif // #if VMA_USE_STL_VECTOR 2174 template<
typename CmpLess,
typename VectorT>
2175 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2177 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2179 vector.data() + vector.size(),
2181 CmpLess()) - vector.data();
2182 VmaVectorInsert(vector, indexToInsert, value);
2183 return indexToInsert;
2186 template<
typename CmpLess,
typename VectorT>
2187 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2190 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2195 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2197 size_t indexToRemove = it - vector.begin();
2198 VmaVectorRemove(vector, indexToRemove);
2204 template<
typename CmpLess,
typename VectorT>
2205 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2208 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2210 vector.data() + vector.size(),
2213 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2215 return it - vector.begin();
2219 return vector.size();
2231 template<
typename T>
2232 class VmaPoolAllocator
2235 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2236 ~VmaPoolAllocator();
2244 uint32_t NextFreeIndex;
2251 uint32_t FirstFreeIndex;
2254 const VkAllocationCallbacks* m_pAllocationCallbacks;
2255 size_t m_ItemsPerBlock;
2256 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2258 ItemBlock& CreateNewBlock();
2261 template<
typename T>
2262 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2263 m_pAllocationCallbacks(pAllocationCallbacks),
2264 m_ItemsPerBlock(itemsPerBlock),
2265 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2267 VMA_ASSERT(itemsPerBlock > 0);
2270 template<
typename T>
2271 VmaPoolAllocator<T>::~VmaPoolAllocator()
2276 template<
typename T>
2277 void VmaPoolAllocator<T>::Clear()
2279 for(
size_t i = m_ItemBlocks.size(); i--; )
2280 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2281 m_ItemBlocks.clear();
2284 template<
typename T>
2285 T* VmaPoolAllocator<T>::Alloc()
2287 for(
size_t i = m_ItemBlocks.size(); i--; )
2289 ItemBlock& block = m_ItemBlocks[i];
2291 if(block.FirstFreeIndex != UINT32_MAX)
2293 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2294 block.FirstFreeIndex = pItem->NextFreeIndex;
2295 return &pItem->Value;
2300 ItemBlock& newBlock = CreateNewBlock();
2301 Item*
const pItem = &newBlock.pItems[0];
2302 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2303 return &pItem->Value;
2306 template<
typename T>
2307 void VmaPoolAllocator<T>::Free(T* ptr)
2310 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2312 ItemBlock& block = m_ItemBlocks[i];
2316 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2319 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2321 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2322 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2323 block.FirstFreeIndex = index;
2327 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2330 template<
typename T>
2331 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2333 ItemBlock newBlock = {
2334 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2336 m_ItemBlocks.push_back(newBlock);
2339 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2340 newBlock.pItems[i].NextFreeIndex = i + 1;
2341 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2342 return m_ItemBlocks.back();
2348 #if VMA_USE_STL_LIST 2350 #define VmaList std::list 2352 #else // #if VMA_USE_STL_LIST 2354 template<
typename T>
2363 template<
typename T>
2367 typedef VmaListItem<T> ItemType;
2369 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2373 size_t GetCount()
const {
return m_Count; }
2374 bool IsEmpty()
const {
return m_Count == 0; }
2376 ItemType* Front() {
return m_pFront; }
2377 const ItemType* Front()
const {
return m_pFront; }
2378 ItemType* Back() {
return m_pBack; }
2379 const ItemType* Back()
const {
return m_pBack; }
2381 ItemType* PushBack();
2382 ItemType* PushFront();
2383 ItemType* PushBack(
const T& value);
2384 ItemType* PushFront(
const T& value);
2389 ItemType* InsertBefore(ItemType* pItem);
2391 ItemType* InsertAfter(ItemType* pItem);
2393 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2394 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2396 void Remove(ItemType* pItem);
2399 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2400 VmaPoolAllocator<ItemType> m_ItemAllocator;
2406 VmaRawList(
const VmaRawList<T>& src);
2407 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2410 template<
typename T>
2411 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2412 m_pAllocationCallbacks(pAllocationCallbacks),
2413 m_ItemAllocator(pAllocationCallbacks, 128),
2420 template<
typename T>
2421 VmaRawList<T>::~VmaRawList()
2427 template<
typename T>
2428 void VmaRawList<T>::Clear()
2430 if(IsEmpty() ==
false)
2432 ItemType* pItem = m_pBack;
2433 while(pItem != VMA_NULL)
2435 ItemType*
const pPrevItem = pItem->pPrev;
2436 m_ItemAllocator.Free(pItem);
2439 m_pFront = VMA_NULL;
2445 template<
typename T>
2446 VmaListItem<T>* VmaRawList<T>::PushBack()
2448 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2449 pNewItem->pNext = VMA_NULL;
2452 pNewItem->pPrev = VMA_NULL;
2453 m_pFront = pNewItem;
2459 pNewItem->pPrev = m_pBack;
2460 m_pBack->pNext = pNewItem;
2467 template<
typename T>
2468 VmaListItem<T>* VmaRawList<T>::PushFront()
2470 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2471 pNewItem->pPrev = VMA_NULL;
2474 pNewItem->pNext = VMA_NULL;
2475 m_pFront = pNewItem;
2481 pNewItem->pNext = m_pFront;
2482 m_pFront->pPrev = pNewItem;
2483 m_pFront = pNewItem;
2489 template<
typename T>
2490 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2492 ItemType*
const pNewItem = PushBack();
2493 pNewItem->Value = value;
2497 template<
typename T>
2498 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2500 ItemType*
const pNewItem = PushFront();
2501 pNewItem->Value = value;
2505 template<
typename T>
2506 void VmaRawList<T>::PopBack()
2508 VMA_HEAVY_ASSERT(m_Count > 0);
2509 ItemType*
const pBackItem = m_pBack;
2510 ItemType*
const pPrevItem = pBackItem->pPrev;
2511 if(pPrevItem != VMA_NULL)
2513 pPrevItem->pNext = VMA_NULL;
2515 m_pBack = pPrevItem;
2516 m_ItemAllocator.Free(pBackItem);
2520 template<
typename T>
2521 void VmaRawList<T>::PopFront()
2523 VMA_HEAVY_ASSERT(m_Count > 0);
2524 ItemType*
const pFrontItem = m_pFront;
2525 ItemType*
const pNextItem = pFrontItem->pNext;
2526 if(pNextItem != VMA_NULL)
2528 pNextItem->pPrev = VMA_NULL;
2530 m_pFront = pNextItem;
2531 m_ItemAllocator.Free(pFrontItem);
2535 template<
typename T>
2536 void VmaRawList<T>::Remove(ItemType* pItem)
2538 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2539 VMA_HEAVY_ASSERT(m_Count > 0);
2541 if(pItem->pPrev != VMA_NULL)
2543 pItem->pPrev->pNext = pItem->pNext;
2547 VMA_HEAVY_ASSERT(m_pFront == pItem);
2548 m_pFront = pItem->pNext;
2551 if(pItem->pNext != VMA_NULL)
2553 pItem->pNext->pPrev = pItem->pPrev;
2557 VMA_HEAVY_ASSERT(m_pBack == pItem);
2558 m_pBack = pItem->pPrev;
2561 m_ItemAllocator.Free(pItem);
2565 template<
typename T>
2566 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2568 if(pItem != VMA_NULL)
2570 ItemType*
const prevItem = pItem->pPrev;
2571 ItemType*
const newItem = m_ItemAllocator.Alloc();
2572 newItem->pPrev = prevItem;
2573 newItem->pNext = pItem;
2574 pItem->pPrev = newItem;
2575 if(prevItem != VMA_NULL)
2577 prevItem->pNext = newItem;
2581 VMA_HEAVY_ASSERT(m_pFront == pItem);
2591 template<
typename T>
2592 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2594 if(pItem != VMA_NULL)
2596 ItemType*
const nextItem = pItem->pNext;
2597 ItemType*
const newItem = m_ItemAllocator.Alloc();
2598 newItem->pNext = nextItem;
2599 newItem->pPrev = pItem;
2600 pItem->pNext = newItem;
2601 if(nextItem != VMA_NULL)
2603 nextItem->pPrev = newItem;
2607 VMA_HEAVY_ASSERT(m_pBack == pItem);
2617 template<
typename T>
2618 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2620 ItemType*
const newItem = InsertBefore(pItem);
2621 newItem->Value = value;
2625 template<
typename T>
2626 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2628 ItemType*
const newItem = InsertAfter(pItem);
2629 newItem->Value = value;
2633 template<
typename T,
typename AllocatorT>
2646 T& operator*()
const 2648 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2649 return m_pItem->Value;
2651 T* operator->()
const 2653 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2654 return &m_pItem->Value;
2657 iterator& operator++()
2659 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2660 m_pItem = m_pItem->pNext;
2663 iterator& operator--()
2665 if(m_pItem != VMA_NULL)
2667 m_pItem = m_pItem->pPrev;
2671 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2672 m_pItem = m_pList->Back();
2677 iterator operator++(
int)
2679 iterator result = *
this;
2683 iterator operator--(
int)
2685 iterator result = *
this;
2690 bool operator==(
const iterator& rhs)
const 2692 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2693 return m_pItem == rhs.m_pItem;
2695 bool operator!=(
const iterator& rhs)
const 2697 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2698 return m_pItem != rhs.m_pItem;
2702 VmaRawList<T>* m_pList;
2703 VmaListItem<T>* m_pItem;
2705 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2711 friend class VmaList<T, AllocatorT>;
2714 class const_iterator
2723 const_iterator(
const iterator& src) :
2724 m_pList(src.m_pList),
2725 m_pItem(src.m_pItem)
2729 const T& operator*()
const 2731 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2732 return m_pItem->Value;
2734 const T* operator->()
const 2736 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2737 return &m_pItem->Value;
2740 const_iterator& operator++()
2742 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2743 m_pItem = m_pItem->pNext;
2746 const_iterator& operator--()
2748 if(m_pItem != VMA_NULL)
2750 m_pItem = m_pItem->pPrev;
2754 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2755 m_pItem = m_pList->Back();
2760 const_iterator operator++(
int)
2762 const_iterator result = *
this;
2766 const_iterator operator--(
int)
2768 const_iterator result = *
this;
2773 bool operator==(
const const_iterator& rhs)
const 2775 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2776 return m_pItem == rhs.m_pItem;
2778 bool operator!=(
const const_iterator& rhs)
const 2780 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2781 return m_pItem != rhs.m_pItem;
2785 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2791 const VmaRawList<T>* m_pList;
2792 const VmaListItem<T>* m_pItem;
2794 friend class VmaList<T, AllocatorT>;
2797 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2799 bool empty()
const {
return m_RawList.IsEmpty(); }
2800 size_t size()
const {
return m_RawList.GetCount(); }
2802 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2803 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2805 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2806 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2808 void clear() { m_RawList.Clear(); }
2809 void push_back(
const T& value) { m_RawList.PushBack(value); }
2810 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2811 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2814 VmaRawList<T> m_RawList;
2817 #endif // #if VMA_USE_STL_LIST 2825 #if VMA_USE_STL_UNORDERED_MAP 2827 #define VmaPair std::pair 2829 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2830 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2832 #else // #if VMA_USE_STL_UNORDERED_MAP 2834 template<
typename T1,
typename T2>
2840 VmaPair() : first(), second() { }
2841 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2847 template<
typename KeyT,
typename ValueT>
2851 typedef VmaPair<KeyT, ValueT> PairType;
2852 typedef PairType* iterator;
2854 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2856 iterator begin() {
return m_Vector.begin(); }
2857 iterator end() {
return m_Vector.end(); }
2859 void insert(
const PairType& pair);
2860 iterator find(
const KeyT& key);
2861 void erase(iterator it);
2864 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2867 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2869 template<
typename FirstT,
typename SecondT>
2870 struct VmaPairFirstLess
2872 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2874 return lhs.first < rhs.first;
2876 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2878 return lhs.first < rhsFirst;
2882 template<
typename KeyT,
typename ValueT>
2883 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2885 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2887 m_Vector.data() + m_Vector.size(),
2889 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2890 VmaVectorInsert(m_Vector, indexToInsert, pair);
2893 template<
typename KeyT,
typename ValueT>
2894 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2896 PairType* it = VmaBinaryFindFirstNotLess(
2898 m_Vector.data() + m_Vector.size(),
2900 VmaPairFirstLess<KeyT, ValueT>());
2901 if((it != m_Vector.end()) && (it->first == key))
2907 return m_Vector.end();
2911 template<
typename KeyT,
typename ValueT>
2912 void VmaMap<KeyT, ValueT>::erase(iterator it)
2914 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2917 #endif // #if VMA_USE_STL_UNORDERED_MAP 2923 class VmaDeviceMemoryBlock;
2925 enum VMA_BLOCK_VECTOR_TYPE
2927 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2928 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2929 VMA_BLOCK_VECTOR_TYPE_COUNT
2935 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2936 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2939 struct VmaAllocation_T
2942 enum ALLOCATION_TYPE
2944 ALLOCATION_TYPE_NONE,
2945 ALLOCATION_TYPE_BLOCK,
2946 ALLOCATION_TYPE_DEDICATED,
2949 VmaAllocation_T(uint32_t currentFrameIndex) :
2952 m_pUserData(VMA_NULL),
2953 m_Type(ALLOCATION_TYPE_NONE),
2954 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2955 m_LastUseFrameIndex(currentFrameIndex)
2959 void InitBlockAllocation(
2961 VmaDeviceMemoryBlock* block,
2962 VkDeviceSize offset,
2963 VkDeviceSize alignment,
2965 VmaSuballocationType suballocationType,
2969 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2970 VMA_ASSERT(block != VMA_NULL);
2971 m_Type = ALLOCATION_TYPE_BLOCK;
2972 m_Alignment = alignment;
2974 m_pUserData = pUserData;
2975 m_SuballocationType = suballocationType;
2976 m_BlockAllocation.m_hPool = hPool;
2977 m_BlockAllocation.m_Block = block;
2978 m_BlockAllocation.m_Offset = offset;
2979 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2984 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2985 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2986 m_Type = ALLOCATION_TYPE_BLOCK;
2987 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2988 m_BlockAllocation.m_Block = VMA_NULL;
2989 m_BlockAllocation.m_Offset = 0;
2990 m_BlockAllocation.m_CanBecomeLost =
true;
2993 void ChangeBlockAllocation(
2994 VmaDeviceMemoryBlock* block,
2995 VkDeviceSize offset)
2997 VMA_ASSERT(block != VMA_NULL);
2998 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2999 m_BlockAllocation.m_Block = block;
3000 m_BlockAllocation.m_Offset = offset;
3003 void InitDedicatedAllocation(
3004 uint32_t memoryTypeIndex,
3005 VkDeviceMemory hMemory,
3006 VmaSuballocationType suballocationType,
3012 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3013 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3014 m_Type = ALLOCATION_TYPE_DEDICATED;
3017 m_pUserData = pUserData;
3018 m_SuballocationType = suballocationType;
3019 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3020 m_DedicatedAllocation.m_hMemory = hMemory;
3021 m_DedicatedAllocation.m_PersistentMap = persistentMap;
3022 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors for VmaAllocation_T state. Trivial getters are defined
// inline; type-dependent getters (GetOffset, GetMemory, ...) are declared here
// and defined out of class further below in this file.
3025 ALLOCATION_TYPE GetType()
const {
return m_Type; }
3026 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3027 VkDeviceSize GetSize()
const {
return m_Size; }
3028 void* GetUserData()
const {
return m_pUserData; }
3029 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
3030 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
// Valid only for block allocations (asserted).
3032 VmaDeviceMemoryBlock* GetBlock()
const 3034 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3035 return m_BlockAllocation.m_Block;
3037 VkDeviceSize GetOffset()
const;
3038 VkDeviceMemory GetMemory()
const;
3039 uint32_t GetMemoryTypeIndex()
const;
3040 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
3041 void* GetMappedData()
const;
3042 bool CanBecomeLost()
const;
3043 VmaPool GetPool()
const;
// Map/unmap helpers used only for dedicated, persistently mapped allocations.
3045 VkResult DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
3046 void DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
// Atomic frame-index access: load plus a CAS used by the lost-allocation
// protocol (compare_exchange_weak may fail spuriously; callers retry).
3048 uint32_t GetLastUseFrameIndex()
const 3050 return m_LastUseFrameIndex.load();
3052 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3054 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
// Attempts to mark this allocation as lost; defined out of class below.
3064 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Accumulates this dedicated allocation into outInfo (body mostly missing
// from this view; only the type assert is visible).
3066 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3068 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Data members of VmaAllocation_T. The two tail structs describe the two
// mutually exclusive allocation kinds; NOTE(review): in the full header they
// are presumably members of an anonymous union (the union keyword falls in
// missing lines 3086-3107) — confirm before relying on layout.
3080 VkDeviceSize m_Alignment;
3081 VkDeviceSize m_Size;
3083 ALLOCATION_TYPE m_Type;
3084 VmaSuballocationType m_SuballocationType;
// Atomic: shared with MakeLost()/CompareExchangeLastUseFrameIndex().
3085 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State when m_Type == ALLOCATION_TYPE_BLOCK.
3088 struct BlockAllocation
3091 VmaDeviceMemoryBlock* m_Block;
3092 VkDeviceSize m_Offset;
3093 bool m_CanBecomeLost;
// State when m_Type == ALLOCATION_TYPE_DEDICATED.
3097 struct DedicatedAllocation
3099 uint32_t m_MemoryTypeIndex;
3100 VkDeviceMemory m_hMemory;
3101 bool m_PersistentMap;
3102 void* m_pMappedData;
3108 BlockAllocation m_BlockAllocation;
3110 DedicatedAllocation m_DedicatedAllocation;
// One contiguous range inside a memory block: either free
// (VMA_SUBALLOCATION_TYPE_FREE, hAllocation == VK_NULL_HANDLE) or owned by an
// allocation.
3118 struct VmaSuballocation
3120 VkDeviceSize offset;
3122 VmaAllocation hAllocation;
3123 VmaSuballocationType type;
3126 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Artificial cost (in bytes) charged per allocation that must be made lost,
// used to compare candidate allocation requests.
3129 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of a search for a place to allocate: target offset/item plus the
// free/occupied byte totals and how many existing allocations would have to
// be sacrificed ("made lost") to use it.
3144 struct VmaAllocationRequest
3146 VkDeviceSize offset;
3147 VkDeviceSize sumFreeSize;
3148 VkDeviceSize sumItemSize;
3149 VmaSuballocationList::iterator item;
3150 size_t itemsToMakeLostCount;
// Lower cost = better candidate; sacrificed allocations are penalized.
3152 VkDeviceSize CalcCost()
const 3154 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the suballocations within a single VkDeviceMemory block:
// a linked list of suballocations ordered by offset, plus a by-size index of
// free ranges for best-fit search. (Access specifiers and some lines are
// missing from this extraction.)
3162 class VmaBlockMetadata
3165 VmaBlockMetadata(VmaAllocator hAllocator);
3166 ~VmaBlockMetadata();
// Starts the block as one single free suballocation covering `size` bytes.
3167 void Init(VkDeviceSize size);
// Consistency check of all internal invariants; returns false on corruption.
3170 bool Validate()
const;
3171 VkDeviceSize GetSize()
const {
return m_Size; }
3172 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3173 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3174 VkDeviceSize GetUnusedRangeSizeMax()
const;
3176 bool IsEmpty()
const;
3178 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3181 #if VMA_STATS_STRING_ENABLED 3182 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Shortcut when the block is known to be empty.
3186 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Searches for space for (allocSize, allocAlignment, allocType); may propose
// making other allocations lost when canMakeOtherLost is set. Returns true
// and fills *pAllocationRequest on success.
3191 bool CreateAllocationRequest(
3192 uint32_t currentFrameIndex,
3193 uint32_t frameInUseCount,
3194 VkDeviceSize bufferImageGranularity,
3195 VkDeviceSize allocSize,
3196 VkDeviceSize allocAlignment,
3197 VmaSuballocationType allocType,
3198 bool canMakeOtherLost,
3199 VmaAllocationRequest* pAllocationRequest);
3201 bool MakeRequestedAllocationsLost(
3202 uint32_t currentFrameIndex,
3203 uint32_t frameInUseCount,
3204 VmaAllocationRequest* pAllocationRequest);
3206 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// (Alloc() header line missing from view; these are its parameters.)
3210 const VmaAllocationRequest& request,
3211 VmaSuballocationType type,
3212 VkDeviceSize allocSize,
3213 VmaAllocation hAllocation);
3216 void Free(
const VmaAllocation allocation);
// --- data members ---
3219 VkDeviceSize m_Size;
3220 uint32_t m_FreeCount;
3221 VkDeviceSize m_SumFreeSize;
// All suballocations, sorted by offset, covering the whole block.
3222 VmaSuballocationList m_Suballocations;
// Iterators to free suballocations (above a registration threshold),
// sorted ascending by size — enables binary-search best-fit.
3225 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3227 bool ValidateFreeSuballocationList()
const;
// Checks whether a specific free/candidate item can satisfy a request;
// outputs the offset and the make-lost accounting.
3231 bool CheckAllocation(
3232 uint32_t currentFrameIndex,
3233 uint32_t frameInUseCount,
3234 VkDeviceSize bufferImageGranularity,
3235 VkDeviceSize allocSize,
3236 VkDeviceSize allocAlignment,
3237 VmaSuballocationType allocType,
3238 VmaSuballocationList::const_iterator suballocItem,
3239 bool canMakeOtherLost,
3240 VkDeviceSize* pOffset,
3241 size_t* itemsToMakeLostCount,
3242 VkDeviceSize* pSumFreeSize,
3243 VkDeviceSize* pSumItemSize)
const;
3245 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3249 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3252 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3255 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// A single VkDeviceMemory object plus the metadata that manages
// suballocations inside it. Members appear public in this extraction —
// other classes access m_hMemory/m_Metadata directly.
3264 class VmaDeviceMemoryBlock
3267 uint32_t m_MemoryTypeIndex;
3268 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3269 VkDeviceMemory m_hMemory;
3270 bool m_PersistentMap;
3271 void* m_pMappedData;
3272 VmaBlockMetadata m_Metadata;
3274 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor requires Destroy() to have been called (memory released) first.
3276 ~VmaDeviceMemoryBlock()
3278 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// (Init() header line missing from view; these are its parameters.)
3283 uint32_t newMemoryTypeIndex,
3284 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3285 VkDeviceMemory newMemory,
3286 VkDeviceSize newSize,
3290 void Destroy(VmaAllocator allocator);
3293 bool Validate()
const;
// Generic less-than on raw pointer values, for sorted pointer containers.
3296 struct VmaPointerLess
3298 bool operator()(
const void* lhs,
const void* rhs)
const 3304 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock instances for one memory type and one
// block-vector type (mapped/unmapped); owns block creation/destruction and
// serves allocation requests. Used both for default pools and custom VmaPool.
// (Constructor name line is missing from this extraction; these are its
// parameters.)
3312 struct VmaBlockVector
3315 VmaAllocator hAllocator,
3316 uint32_t memoryTypeIndex,
3317 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3318 VkDeviceSize preferredBlockSize,
3319 size_t minBlockCount,
3320 size_t maxBlockCount,
3321 VkDeviceSize bufferImageGranularity,
3322 uint32_t frameInUseCount,
// Pre-creates m_MinBlockCount empty blocks.
3326 VkResult CreateMinBlocks();
3328 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3329 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3330 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3331 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3332 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3336 bool IsEmpty()
const {
return m_Blocks.empty(); }
// (Allocate() header line missing; these are its parameters.)
3339 VmaPool hCurrentPool,
3340 uint32_t currentFrameIndex,
3341 const VkMemoryRequirements& vkMemReq,
3343 VmaSuballocationType suballocType,
3344 VmaAllocation* pAllocation);
3347 VmaAllocation hAllocation);
3352 #if VMA_STATS_STRING_ENABLED 3353 void PrintDetailedMap(
class VmaJsonWriter& json);
3356 void UnmapPersistentlyMappedMemory();
3357 VkResult MapPersistentlyMappedMemory();
3359 void MakePoolAllocationsLost(
3360 uint32_t currentFrameIndex,
3361 size_t* pLostAllocationCount);
// Lazily creates the defragmentator for this vector.
3363 VmaDefragmentator* EnsureDefragmentator(
3364 VmaAllocator hAllocator,
3365 uint32_t currentFrameIndex);
3367 VkResult Defragment(
3369 VkDeviceSize& maxBytesToMove,
3370 uint32_t& maxAllocationsToMove);
3372 void DestroyDefragmentator();
3375 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
3377 const VmaAllocator m_hAllocator;
3378 const uint32_t m_MemoryTypeIndex;
3379 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3380 const VkDeviceSize m_PreferredBlockSize;
3381 const size_t m_MinBlockCount;
3382 const size_t m_MaxBlockCount;
3383 const VkDeviceSize m_BufferImageGranularity;
3384 const uint32_t m_FrameInUseCount;
3385 const bool m_IsCustomPool;
3388 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while an empty block is retained (avoids destroying/recreating).
3392 bool m_HasEmptyBlock;
3393 VmaDefragmentator* m_pDefragmentator;
3396 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks roughly sorted with O(1) work per call.
3400 void IncrementallySortBlocks();
3402 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Interior of VmaPool_T (class header missing from this extraction):
// a custom pool is a thin wrapper around one VmaBlockVector.
3408 VmaBlockVector m_BlockVector;
3412 VmaAllocator hAllocator,
3416 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact memory,
// within caller-supplied byte/count budgets.
3418 #if VMA_STATS_STRING_ENABLED 3423 class VmaDefragmentator
3425 const VmaAllocator m_hAllocator;
3426 VmaBlockVector*
const m_pBlockVector;
3427 uint32_t m_CurrentFrameIndex;
3428 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Running totals reported via GetBytesMoved()/GetAllocationsMoved().
3429 VkDeviceSize m_BytesMoved;
3430 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set when
// the allocation is actually moved.
3432 struct AllocationInfo
3434 VmaAllocation m_hAllocation;
3435 VkBool32* m_pChanged;
3438 m_hAllocation(VK_NULL_HANDLE),
3439 m_pChanged(VMA_NULL)
// Orders allocations largest-first for the move heuristic.
3444 struct AllocationInfoSizeGreater
3446 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3448 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3453 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (struct BlockInfo; header line missing from view).
3457 VmaDeviceMemoryBlock* m_pBlock;
3458 bool m_HasNonMovableAllocations;
3459 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3461 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3463 m_HasNonMovableAllocations(true),
3464 m_Allocations(pAllocationCallbacks),
3465 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when it holds more allocations than
// were registered with the defragmentator.
3469 void CalcHasNonMovableAllocations()
3471 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3472 const size_t defragmentAllocCount = m_Allocations.size();
3473 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic: "Descecnding" typo kept — it is part of the identifier.)
3476 void SortAllocationsBySizeDescecnding()
3478 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3481 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3482 void Unmap(VmaAllocator hAllocator);
3486 void* m_pMappedDataForDefragmentation;
// Comparators for binary search / sorting of BlockInfo* by block pointer.
3489 struct BlockPointerLess
3491 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3493 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3495 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3497 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ordering that prefers destination blocks: movable-only blocks first, then
// by free space. (Return statements fall in missing lines.)
3503 struct BlockInfoCompareMoveDestination
3505 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3507 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3511 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3515 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3523 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3524 BlockInfoVector m_Blocks;
// One pass of moving allocations, bounded by the two budgets.
3526 VkResult DefragmentRound(
3527 VkDeviceSize maxBytesToMove,
3528 uint32_t maxAllocationsToMove);
3530 static bool MoveMakesSense(
3531 size_t dstBlockIndex, VkDeviceSize dstOffset,
3532 size_t srcBlockIndex, VkDeviceSize srcOffset);
// (Public constructor; name line missing from view.)
3536 VmaAllocator hAllocator,
3537 VmaBlockVector* pBlockVector,
3538 uint32_t currentFrameIndex);
3540 ~VmaDefragmentator();
3542 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3543 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3545 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3547 VkResult Defragment(
3548 VkDeviceSize maxBytesToMove,
3549 uint32_t maxAllocationsToMove);
// The allocator implementation behind the opaque VmaAllocator handle:
// device properties, per-memory-type block vectors, dedicated-allocation
// registries, custom pools, and the central allocate/free entry points.
3553 struct VmaAllocator_T
3556 bool m_UseKhrDedicatedAllocation;
3558 bool m_AllocationCallbacksSpecified;
3559 VkAllocationCallbacks m_AllocationCallbacks;
// >0 means persistently mapped memory is currently unmapped (counter allows
// nested unmap/map calls).
3563 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
// Optional per-heap size limits, guarded by its own mutex.
3566 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3567 VMA_MUTEX m_HeapSizeLimitMutex;
3569 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3570 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors: [memory type][mapped/unmapped].
3573 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
// Registry of dedicated allocations, same indexing; one mutex per type.
3576 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3577 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3578 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied at creation, else null (Vulkan default).
3583 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3585 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3589 return m_VulkanFunctions;
// Effective granularity: device limit, floored by the debug override.
3592 VkDeviceSize GetBufferImageGranularity()
const 3595 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3596 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3599 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3600 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3602 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3604 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3605 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Requirement queries that also report VK_KHR_dedicated_allocation hints.
3608 void GetBufferMemoryRequirements(
3610 VkMemoryRequirements& memReq,
3611 bool& requiresDedicatedAllocation,
3612 bool& prefersDedicatedAllocation)
const;
3613 void GetImageMemoryRequirements(
3615 VkMemoryRequirements& memReq,
3616 bool& requiresDedicatedAllocation,
3617 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point behind vmaAllocateMemory*.
3620 VkResult AllocateMemory(
3621 const VkMemoryRequirements& vkMemReq,
3622 bool requiresDedicatedAllocation,
3623 bool prefersDedicatedAllocation,
3624 VkBuffer dedicatedBuffer,
3625 VkImage dedicatedImage,
3627 VmaSuballocationType suballocType,
3628 VmaAllocation* pAllocation);
3631 void FreeMemory(
const VmaAllocation allocation);
3633 void CalculateStats(
VmaStats* pStats);
3635 #if VMA_STATS_STRING_ENABLED 3636 void PrintDetailedMap(
class VmaJsonWriter& json);
3639 void UnmapPersistentlyMappedMemory();
3640 VkResult MapPersistentlyMappedMemory();
3642 VkResult Defragment(
3643 VmaAllocation* pAllocations,
3644 size_t allocationCount,
3645 VkBool32* pAllocationsChanged,
3649 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3652 void DestroyPool(VmaPool pool);
3653 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3655 void SetCurrentFrameIndex(uint32_t frameIndex);
3657 void MakePoolAllocationsLost(
3659 size_t* pLostAllocationCount);
3661 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers around vkAllocateMemory/vkFreeMemory that also apply heap
// size limits and user callbacks.
3663 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3664 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3667 VkDeviceSize m_PreferredLargeHeapBlockSize;
3668 VkDeviceSize m_PreferredSmallHeapBlockSize;
3670 VkPhysicalDevice m_PhysicalDevice;
3671 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3673 VMA_MUTEX m_PoolsMutex;
3675 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3681 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation restricted to a single, already-chosen memory type.
3683 VkResult AllocateMemoryOfType(
3684 const VkMemoryRequirements& vkMemReq,
3685 bool dedicatedAllocation,
3686 VkBuffer dedicatedBuffer,
3687 VkImage dedicatedImage,
3689 uint32_t memTypeIndex,
3690 VmaSuballocationType suballocType,
3691 VmaAllocation* pAllocation);
// Creates a standalone VkDeviceMemory (dedicated allocation path).
3694 VkResult AllocateDedicatedMemory(
3696 VmaSuballocationType suballocType,
3697 uint32_t memTypeIndex,
3700 VkBuffer dedicatedBuffer,
3701 VkImage dedicatedImage,
3702 VmaAllocation* pAllocation);
3705 void FreeDedicatedMemory(VmaAllocation allocation);
// CPU-side allocation helpers that route through the allocator's
// VkAllocationCallbacks. (vma_delete presumably calls the destructor before
// freeing — that line falls outside this extraction; confirm.)
3711 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3713 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3716 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3718 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation.
3721 template<
typename T>
3722 static T* VmaAllocate(VmaAllocator hAllocator)
3724 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation (no overflow check on size*count visible here).
3727 template<
typename T>
3728 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3730 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3733 template<
typename T>
3734 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3739 VmaFree(hAllocator, ptr);
// Array variant: iterates count elements in reverse (destructor calls fall
// in missing lines), then frees the storage.
3743 template<
typename T>
3744 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3748 for(
size_t i = count; i--; )
3750 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used to build the stats string.
// Note: m_Data is NOT NUL-terminated; pair GetData() with GetLength().
3757 #if VMA_STATS_STRING_ENABLED 3759 class VmaStringBuilder
3762 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3763 size_t GetLength()
const {
return m_Data.size(); }
3764 const char* GetData()
const {
return m_Data.data(); }
3766 void Add(
char ch) { m_Data.push_back(ch); }
3767 void Add(
const char* pStr);
3768 void AddNewLine() { Add(
'\n'); }
3769 void AddNumber(uint32_t num);
3770 void AddNumber(uint64_t num);
3771 void AddPointer(
const void* ptr);
3774 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by bulk-resizing and memcpy (no terminator stored).
3777 void VmaStringBuilder::Add(
const char* pStr)
3779 const size_t strLen = strlen(pStr);
3782 const size_t oldCount = m_Data.size();
3783 m_Data.resize(oldCount + strLen);
3784 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting into a stack buffer, then appended.
// (Buffer declarations and Add(buf) calls fall in missing lines.)
3788 void VmaStringBuilder::AddNumber(uint32_t num)
3791 VmaUint32ToStr(buf,
sizeof(buf), num);
3795 void VmaStringBuilder::AddNumber(uint64_t num)
3798 VmaUint64ToStr(buf,
sizeof(buf), num);
3802 void VmaStringBuilder::AddPointer(
const void* ptr)
3805 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Maintains a stack of open
// objects/arrays to place commas, colons and indentation correctly.
3809 #endif // #if VMA_STATS_STRING_ENABLED 3814 #if VMA_STATS_STRING_ENABLED 3819 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3822 void BeginObject(
bool singleLine =
false);
3825 void BeginArray(
bool singleLine =
false);
// Strings may be written whole (WriteString) or built incrementally via
// BeginString / ContinueString / EndString.
3828 void WriteString(
const char* pStr);
3829 void BeginString(
const char* pStr = VMA_NULL);
3830 void ContinueString(
const char* pStr);
3831 void ContinueString(uint32_t n);
3832 void ContinueString(uint64_t n);
3833 void EndString(
const char* pStr = VMA_NULL);
3835 void WriteNumber(uint32_t n);
3836 void WriteNumber(uint64_t n);
3837 void WriteBool(
bool b);
3841 static const char*
const INDENT;
3843 enum COLLECTION_TYPE
3845 COLLECTION_TYPE_OBJECT,
3846 COLLECTION_TYPE_ARRAY,
// One stack entry per currently-open collection. In objects, valueCount
// parity distinguishes keys (even) from values (odd).
3850 COLLECTION_TYPE type;
3851 uint32_t valueCount;
3852 bool singleLineMode;
3855 VmaStringBuilder& m_SB;
3856 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3857 bool m_InsideString;
3859 void BeginValue(
bool isString);
3860 void WriteIndent(
bool oneLess =
false);
3863 const char*
const VmaJsonWriter::INDENT =
" ";
// VmaJsonWriter member definitions. Many bodies have lines missing from this
// extraction (the m_SB.Add calls that emit braces/commas); asserts and stack
// bookkeeping are what remains visible.
3865 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3867 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3868 m_InsideString(false)
// Destructor checks the document was fully closed.
3872 VmaJsonWriter::~VmaJsonWriter()
3874 VMA_ASSERT(!m_InsideString);
3875 VMA_ASSERT(m_Stack.empty());
// Opens "{" and pushes a stack entry tracking this object.
3878 void VmaJsonWriter::BeginObject(
bool singleLine)
3880 VMA_ASSERT(!m_InsideString);
3886 item.type = COLLECTION_TYPE_OBJECT;
3887 item.valueCount = 0;
3888 item.singleLineMode = singleLine;
3889 m_Stack.push_back(item);
// Closes the innermost collection, which must be an object.
3892 void VmaJsonWriter::EndObject()
3894 VMA_ASSERT(!m_InsideString);
3899 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens "[" and pushes a stack entry tracking this array.
3903 void VmaJsonWriter::BeginArray(
bool singleLine)
3905 VMA_ASSERT(!m_InsideString);
3911 item.type = COLLECTION_TYPE_ARRAY;
3912 item.valueCount = 0;
3913 item.singleLineMode = singleLine;
3914 m_Stack.push_back(item);
3917 void VmaJsonWriter::EndArray()
3919 VMA_ASSERT(!m_InsideString);
3924 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: Begin + End in one call (body in missing lines).
3928 void VmaJsonWriter::WriteString(
const char* pStr)
3934 void VmaJsonWriter::BeginString(
const char* pStr)
3936 VMA_ASSERT(!m_InsideString);
3940 m_InsideString =
true;
3941 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3943 ContinueString(pStr);
// Appends characters with JSON escaping; the per-character switch falls in
// missing lines (3953-3978); unsupported characters trip the assert.
3947 void VmaJsonWriter::ContinueString(
const char* pStr)
3949 VMA_ASSERT(m_InsideString);
3951 const size_t strLen = strlen(pStr);
3952 for(
size_t i = 0; i < strLen; ++i)
3979 VMA_ASSERT(0 &&
"Character not currently supported.");
3985 void VmaJsonWriter::ContinueString(uint32_t n)
3987 VMA_ASSERT(m_InsideString);
3991 void VmaJsonWriter::ContinueString(uint64_t n)
3993 VMA_ASSERT(m_InsideString);
3997 void VmaJsonWriter::EndString(
const char* pStr)
3999 VMA_ASSERT(m_InsideString);
4000 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4002 ContinueString(pStr);
4005 m_InsideString =
false;
4008 void VmaJsonWriter::WriteNumber(uint32_t n)
4010 VMA_ASSERT(!m_InsideString);
4015 void VmaJsonWriter::WriteNumber(uint64_t n)
4017 VMA_ASSERT(!m_InsideString);
4022 void VmaJsonWriter::WriteBool(
bool b)
4024 VMA_ASSERT(!m_InsideString);
4026 m_SB.Add(b ?
"true" :
"false");
4029 void VmaJsonWriter::WriteNull()
4031 VMA_ASSERT(!m_InsideString);
// Emits separators before a new value: inside an object, even counts must be
// string keys; odd counts get ":"; later siblings get ",". Emission lines
// are missing from this view.
4036 void VmaJsonWriter::BeginValue(
bool isString)
4038 if(!m_Stack.empty())
4040 StackItem& currItem = m_Stack.back();
4041 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4042 currItem.valueCount % 2 == 0)
4044 VMA_ASSERT(isString);
4047 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4048 currItem.valueCount % 2 != 0)
4052 else if(currItem.valueCount > 0)
4061 ++currItem.valueCount;
// Writes newline + one INDENT per open multi-line collection.
4065 void VmaJsonWriter::WriteIndent(
bool oneLess)
4067 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4071 size_t count = m_Stack.size();
4072 if(count > 0 && oneLess)
4076 for(
size_t i = 0; i < count; ++i)
// Out-of-class VmaAllocation_T definitions. Each getter switches on m_Type;
// the switch/default lines largely fall in missing lines of this extraction.
4083 #endif // #if VMA_STATS_STRING_ENABLED 4087 VkDeviceSize VmaAllocation_T::GetOffset()
const 4091 case ALLOCATION_TYPE_BLOCK:
4092 return m_BlockAllocation.m_Offset;
// Dedicated allocations start at offset 0 of their own VkDeviceMemory.
4093 case ALLOCATION_TYPE_DEDICATED:
4101 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4105 case ALLOCATION_TYPE_BLOCK:
4106 return m_BlockAllocation.m_Block->m_hMemory;
4107 case ALLOCATION_TYPE_DEDICATED:
4108 return m_DedicatedAllocation.m_hMemory;
4111 return VK_NULL_HANDLE;
4115 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4119 case ALLOCATION_TYPE_BLOCK:
4120 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4121 case ALLOCATION_TYPE_DEDICATED:
4122 return m_DedicatedAllocation.m_MemoryTypeIndex;
4129 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 4133 case ALLOCATION_TYPE_BLOCK:
4134 return m_BlockAllocation.m_Block->m_BlockVectorType;
4135 case ALLOCATION_TYPE_DEDICATED:
4136 return (m_DedicatedAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
4139 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// For block allocations the mapped pointer is the block's base pointer plus
// this allocation's offset; null when the block is not mapped.
4143 void* VmaAllocation_T::GetMappedData()
const 4147 case ALLOCATION_TYPE_BLOCK:
4148 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
4150 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
4157 case ALLOCATION_TYPE_DEDICATED:
4158 return m_DedicatedAllocation.m_pMappedData;
4165 bool VmaAllocation_T::CanBecomeLost()
const 4169 case ALLOCATION_TYPE_BLOCK:
4170 return m_BlockAllocation.m_CanBecomeLost;
// Dedicated allocations can never become lost.
4171 case ALLOCATION_TYPE_DEDICATED:
4179 VmaPool VmaAllocation_T::GetPool()
const 4181 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4182 return m_BlockAllocation.m_hPool;
// Maps the dedicated allocation's memory if it was created persistently
// mapped (whole range; flags/offset arguments fall in missing lines).
4185 VkResult VmaAllocation_T::DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4187 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4188 if(m_DedicatedAllocation.m_PersistentMap)
4190 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4191 hAllocator->m_hDevice,
4192 m_DedicatedAllocation.m_hMemory,
4196 &m_DedicatedAllocation.m_pMappedData);
4200 void VmaAllocation_T::DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4202 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4203 if(m_DedicatedAllocation.m_pMappedData)
4205 VMA_ASSERT(m_DedicatedAllocation.m_PersistentMap);
4206 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_DedicatedAllocation.m_hMemory);
4207 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
// Lost-allocation protocol: loops on a CAS. Already-lost -> false; still in
// use within frameInUseCount frames -> false; otherwise CAS the frame index
// to VMA_FRAME_INDEX_LOST. (Loop/return lines are missing from this view.)
4212 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4214 VMA_ASSERT(CanBecomeLost());
4220 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4223 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4228 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4234 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names per VmaSuballocationType (initializer entries fall in
// missing lines), and the JSON serializer for one VmaStatInfo.
4244 #if VMA_STATS_STRING_ENABLED 4247 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Writes stat fields as key/value pairs; the WriteNumber calls for each key
// fall in missing lines of this extraction.
4256 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4260 json.WriteString(
"Blocks");
4263 json.WriteString(
"Allocations");
4266 json.WriteString(
"UnusedRanges");
4269 json.WriteString(
"UsedBytes");
4272 json.WriteString(
"UnusedBytes");
// Nested single-line object with min/avg/max of allocation sizes.
4277 json.WriteString(
"AllocationSize");
4278 json.BeginObject(
true);
4279 json.WriteString(
"Min");
4281 json.WriteString(
"Avg");
4283 json.WriteString(
"Max");
// Nested single-line object with min/avg/max of unused range sizes.
4290 json.WriteString(
"UnusedRangeSize");
4291 json.BeginObject(
true);
4292 json.WriteString(
"Min");
4294 json.WriteString(
"Avg");
4296 json.WriteString(
"Max");
// Comparator for the by-size free list: supports item-vs-item and
// item-vs-size lookups (heterogeneous binary search).
4304 #endif // #if VMA_STATS_STRING_ENABLED 4306 struct VmaSuballocationItemSizeLess
4309 const VmaSuballocationList::iterator lhs,
4310 const VmaSuballocationList::iterator rhs)
const 4312 return lhs->size < rhs->size;
4315 const VmaSuballocationList::iterator lhs,
4316 VkDeviceSize rhsSize)
const 4318 return lhs->size < rhsSize;
// VmaBlockMetadata definitions: construction, initialization and the
// invariant checker.
4325 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4329 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4330 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4334 VmaBlockMetadata::~VmaBlockMetadata()
// Resets the block to one single FREE suballocation spanning all of `size`.
// NOTE(review): the iterator pushed to the by-size list is taken from end();
// the decrement back to the last element falls in a missing line (4352) —
// confirm against the full source.
4338 void VmaBlockMetadata::Init(VkDeviceSize size)
4342 m_SumFreeSize = size;
4344 VmaSuballocation suballoc = {};
4345 suballoc.offset = 0;
4346 suballoc.size = size;
4347 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4348 suballoc.hAllocation = VK_NULL_HANDLE;
4350 m_Suballocations.push_back(suballoc);
4351 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4353 m_FreeSuballocationsBySize.push_back(suballocItem);
// Recomputes offsets/counts/sums from scratch and compares against cached
// members; also checks: no two adjacent FREE items (they must be merged),
// FREE <=> null hAllocation, and the by-size list is sorted ascending.
4356 bool VmaBlockMetadata::Validate()
const 4358 if(m_Suballocations.empty())
4364 VkDeviceSize calculatedOffset = 0;
4366 uint32_t calculatedFreeCount = 0;
4368 VkDeviceSize calculatedSumFreeSize = 0;
4371 size_t freeSuballocationsToRegister = 0;
4373 bool prevFree =
false;
4375 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4376 suballocItem != m_Suballocations.cend();
4379 const VmaSuballocation& subAlloc = *suballocItem;
// Items must tile the block contiguously.
4382 if(subAlloc.offset != calculatedOffset)
4387 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4389 if(prevFree && currFree)
4393 prevFree = currFree;
4395 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4402 calculatedSumFreeSize += subAlloc.size;
4403 ++calculatedFreeCount;
// Only free ranges at or above the threshold are indexed by size.
4404 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4406 ++freeSuballocationsToRegister;
4410 calculatedOffset += subAlloc.size;
4415 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4420 VkDeviceSize lastSize = 0;
4421 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4423 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4426 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4431 if(suballocItem->size < lastSize)
4436 lastSize = suballocItem->size;
// Final verdict: free-list valid and all cached totals match.
4441 ValidateFreeSuballocationList() &&
4442 (calculatedOffset == m_Size) &&
4443 (calculatedSumFreeSize == m_SumFreeSize) &&
4444 (calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the size-sorted free list.
4447 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4449 if(!m_FreeSuballocationsBySize.empty())
4451 return m_FreeSuballocationsBySize.back()->size;
// Empty block = exactly one suballocation and it is free.
4459 bool VmaBlockMetadata::IsEmpty()
const 4461 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Statistics accumulation and JSON dump for one block's metadata.
// Aggregates per-suballocation sizes into outInfo (accumulation statements
// fall in missing lines of this extraction).
4464 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4468 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4480 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4481 suballocItem != m_Suballocations.cend();
4484 const VmaSuballocation& suballoc = *suballocItem;
4485 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into an existing VmaPoolStats.
4498 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4500 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4502 inoutStats.
size += m_Size;
// Dumps totals plus every suballocation (type/size/offset) as JSON.
4509 #if VMA_STATS_STRING_ENABLED 4511 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4515 json.WriteString(
"TotalBytes");
4516 json.WriteNumber(m_Size);
4518 json.WriteString(
"UnusedBytes");
4519 json.WriteNumber(m_SumFreeSize);
4521 json.WriteString(
"Allocations");
4522 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4524 json.WriteString(
"UnusedRanges");
4525 json.WriteNumber(m_FreeCount);
4527 json.WriteString(
"Suballocations");
4530 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4531 suballocItem != m_Suballocations.cend();
4532 ++suballocItem, ++i)
4534 json.BeginObject(
true);
4536 json.WriteString(
"Type");
4537 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4539 json.WriteString(
"Size");
4540 json.WriteNumber(suballocItem->size);
4542 json.WriteString(
"Offset");
4543 json.WriteNumber(suballocItem->offset);
// Trivial request for a known-empty block: allocate at offset 0 with the
// whole free size available and nothing to make lost.
4552 #endif // #if VMA_STATS_STRING_ENABLED 4564 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4566 VMA_ASSERT(IsEmpty());
4567 pAllocationRequest->offset = 0;
4568 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4569 pAllocationRequest->sumItemSize = 0;
4570 pAllocationRequest->item = m_Suballocations.begin();
4571 pAllocationRequest->itemsToMakeLostCount = 0;
// Core placement search. Three phases are visible in this extraction:
// 1) fast reject when not enough total free space and losses are forbidden;
// 2) best-fit over the size-sorted free list (binary search for the first
//    candidate >= allocSize, then scan; a reverse-scan fallback also appears
//    — the strategy-selection branch falls in missing lines);
// 3) when canMakeOtherLost, a linear scan over all suballocations picking
//    the candidate with the lowest CalcCost().
// Several CheckAllocation argument lines are missing throughout.
4574 bool VmaBlockMetadata::CreateAllocationRequest(
4575 uint32_t currentFrameIndex,
4576 uint32_t frameInUseCount,
4577 VkDeviceSize bufferImageGranularity,
4578 VkDeviceSize allocSize,
4579 VkDeviceSize allocAlignment,
4580 VmaSuballocationType allocType,
4581 bool canMakeOtherLost,
4582 VmaAllocationRequest* pAllocationRequest)
4584 VMA_ASSERT(allocSize > 0);
4585 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4586 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4587 VMA_HEAVY_ASSERT(Validate());
// Phase 1: cannot possibly fit without sacrificing allocations.
4590 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
// Phase 2: search registered free ranges, smallest-fitting first.
4596 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4597 if(freeSuballocCount > 0)
4602 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4603 m_FreeSuballocationsBySize.data(),
4604 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4606 VmaSuballocationItemSizeLess());
4607 size_t index = it - m_FreeSuballocationsBySize.data();
4608 for(; index < freeSuballocCount; ++index)
4613 bufferImageGranularity,
4617 m_FreeSuballocationsBySize[index],
4619 &pAllocationRequest->offset,
4620 &pAllocationRequest->itemsToMakeLostCount,
4621 &pAllocationRequest->sumFreeSize,
4622 &pAllocationRequest->sumItemSize))
4624 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate strategy: iterate free ranges largest-first.
4632 for(
size_t index = freeSuballocCount; index--; )
4637 bufferImageGranularity,
4641 m_FreeSuballocationsBySize[index],
4643 &pAllocationRequest->offset,
4644 &pAllocationRequest->itemsToMakeLostCount,
4645 &pAllocationRequest->sumFreeSize,
4646 &pAllocationRequest->sumItemSize))
4648 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Phase 3: brute-force search allowing existing allocations to be lost;
// keeps the cheapest candidate by CalcCost().
4655 if(canMakeOtherLost)
4659 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4660 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4662 VmaAllocationRequest tmpAllocRequest = {};
4663 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4664 suballocIt != m_Suballocations.end();
4667 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4668 suballocIt->hAllocation->CanBecomeLost())
4673 bufferImageGranularity,
4679 &tmpAllocRequest.offset,
4680 &tmpAllocRequest.itemsToMakeLostCount,
4681 &tmpAllocRequest.sumFreeSize,
4682 &tmpAllocRequest.sumItemSize))
4684 tmpAllocRequest.item = suballocIt;
4686 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4688 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate was recorded (sumItemSize was seeded to
// VK_WHOLE_SIZE as "no candidate yet").
4694 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Second phase of an allocation that must sacrifice others: walks forward from
// pAllocationRequest->item, actually marking the required number of
// lost-capable allocations as lost and merging them into free space.
// Returns true when all itemsToMakeLostCount were made lost (the failure
// return path was dropped by extraction — TODO confirm against upstream).
4703 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4704 uint32_t currentFrameIndex,
4705 uint32_t frameInUseCount,
4706 VmaAllocationRequest* pAllocationRequest)
4708 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free suballocations; only live ones can be "made lost".
4710 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4712 ++pAllocationRequest->item;
4714 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4715 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4716 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4717 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges with neighbors and returns the merged iterator,
// keeping pAllocationRequest->item valid.
4719 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item)
4720 --pAllocationRequest->itemsToMakeLostCount;
4728 VMA_HEAVY_ASSERT(Validate());
4729 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4730 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Sweeps the whole block, making every allocation that can become lost
// actually lost (given the current frame and frame-in-use window), freeing
// its suballocation. Returns how many allocations were lost.
4735 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4737 uint32_t lostAllocationCount = 0;
4738 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4739 it != m_Suballocations.end();
4742 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4743 it->hAllocation->CanBecomeLost() &&
4744 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; use the returned iterator to
// continue safely (loop increment lines were dropped by extraction).
4746 it = FreeSuballocation(it);
4747 ++lostAllocationCount;
4750 return lostAllocationCount;
// Commits a previously validated allocation request: converts the chosen FREE
// suballocation into a used one of exactly allocSize at request.offset, and
// re-inserts any leftover space before/after it as new FREE suballocations.
// Also maintains m_FreeCount and m_SumFreeSize bookkeeping.
// NOTE(review): extraction dropped braces and a few bookkeeping lines
// (e.g. the paddingEnd counterpart of the `paddingBegin > 0` update).
4753 void VmaBlockMetadata::Alloc(
4754 const VmaAllocationRequest& request,
4755 VmaSuballocationType type,
4756 VkDeviceSize allocSize,
4757 VmaAllocation hAllocation)
4759 VMA_ASSERT(request.item != m_Suballocations.end());
4760 VmaSuballocation& suballoc = *request.item;
4762 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4764 VMA_ASSERT(request.offset >= suballoc.offset);
// paddingBegin/paddingEnd = unused space left in the free range on each side
// of the aligned allocation.
4765 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4766 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4767 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free-by-size index before being repurposed.
4771 UnregisterFreeSuballocation(request.item);
4773 suballoc.offset = request.offset;
4774 suballoc.size = allocSize;
4775 suballoc.type = type;
4776 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE suballocation inserted after the item.
4781 VmaSuballocation paddingSuballoc = {};
4782 paddingSuballoc.offset = request.offset + allocSize;
4783 paddingSuballoc.size = paddingEnd;
4784 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4785 VmaSuballocationList::iterator next = request.item;
4787 const VmaSuballocationList::iterator paddingEndItem =
4788 m_Suballocations.insert(next, paddingSuballoc);
4789 RegisterFreeSuballocation(paddingEndItem);
// Leading padding likewise becomes a FREE suballocation before the item.
4795 VmaSuballocation paddingSuballoc = {};
4796 paddingSuballoc.offset = request.offset - paddingBegin;
4797 paddingSuballoc.size = paddingBegin;
4798 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4799 const VmaSuballocationList::iterator paddingBeginItem =
4800 m_Suballocations.insert(request.item, paddingSuballoc);
4801 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each padding inserted adds one back (increments
// presumably followed the `if`s — lost in extraction, TODO confirm).
4805 m_FreeCount = m_FreeCount - 1;
4806 if(paddingBegin > 0)
4814 m_SumFreeSize -= allocSize;
// Frees the suballocation holding `allocation` by linear search over the
// suballocation list; asserts if the allocation is not found in this block.
4817 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4819 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4820 suballocItem != m_Suballocations.end();
4823 VmaSuballocation& suballoc = *suballocItem;
4824 if(suballoc.hAllocation == allocation)
// FreeSuballocation handles merging with free neighbors and index upkeep.
4826 FreeSuballocation(suballocItem);
4827 VMA_HEAVY_ASSERT(Validate());
// Reached only if no suballocation matched — caller passed a foreign handle.
4831 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of the size-sorted free-suballocation index: every entry
// must point at a FREE suballocation, be at least the registration threshold,
// and sizes must be non-decreasing. (The `return false`/`return true` lines
// were dropped by extraction.)
4834 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4836 VkDeviceSize lastSize = 0;
4837 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4839 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4841 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Free ranges below this size are intentionally not indexed.
4846 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4851 if(it->size < lastSize)
4857 lastSize = it->size;
// Core placement test: given a candidate suballocation, decides whether an
// allocation of allocSize/allocAlignment/allocType can start there, computing
// the final *pOffset (after debug margin, alignment and bufferImageGranularity
// adjustments) and — in the canMakeOtherLost path — how many following
// allocations would have to be made lost (*itemsToMakeLostCount) plus the
// free/lost byte sums used for cost comparison.
// Two largely parallel branches: with and without canMakeOtherLost.
// NOTE(review): extraction dropped braces and most `return true/false`
// statements; numeric prefixes are original-file line numbers.
4862 bool VmaBlockMetadata::CheckAllocation(
4863 uint32_t currentFrameIndex,
4864 uint32_t frameInUseCount,
4865 VkDeviceSize bufferImageGranularity,
4866 VkDeviceSize allocSize,
4867 VkDeviceSize allocAlignment,
4868 VmaSuballocationType allocType,
4869 VmaSuballocationList::const_iterator suballocItem,
4870 bool canMakeOtherLost,
4871 VkDeviceSize* pOffset,
4872 size_t* itemsToMakeLostCount,
4873 VkDeviceSize* pSumFreeSize,
4874 VkDeviceSize* pSumItemSize)
const 4876 VMA_ASSERT(allocSize > 0);
4877 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4878 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4879 VMA_ASSERT(pOffset != VMA_NULL);
4881 *itemsToMakeLostCount = 0;
// ---- Branch 1: allowed to make other allocations lost. ----
4885 if(canMakeOtherLost)
4887 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4889 *pSumFreeSize = suballocItem->size;
// A live allocation can only be consumed if it is lost-capable and old
// enough relative to the frame-in-use window.
4893 if(suballocItem->hAllocation->CanBecomeLost() &&
4894 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4896 ++*itemsToMakeLostCount;
4897 *pSumItemSize = suballocItem->size;
// Not enough room between this offset and the end of the block.
4906 if(m_Size - suballocItem->offset < allocSize)
4912 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation unless it is block-first.
4915 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4917 *pOffset += VMA_DEBUG_MARGIN;
4921 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4922 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a previous suballocation of conflicting type
// (buffer vs image) shares the same "page", bump the offset up to the
// granularity boundary.
4926 if(bufferImageGranularity > 1)
4928 bool bufferImageGranularityConflict =
false;
4929 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4930 while(prevSuballocItem != m_Suballocations.cbegin())
4933 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4934 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4936 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4938 bufferImageGranularityConflict =
true;
4946 if(bufferImageGranularityConflict)
4948 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// Alignment pushed the offset past this suballocation entirely.
4954 if(*pOffset >= suballocItem->offset + suballocItem->size)
4960 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4963 VmaSuballocationList::const_iterator next = suballocItem;
4965 const VkDeviceSize requiredEndMargin =
4966 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4968 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4970 if(suballocItem->offset + totalSize > m_Size)
// The request may span several following suballocations; accumulate free
// bytes and lost-candidate bytes until enough space is covered.
4977 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4978 if(totalSize > suballocItem->size)
4980 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4981 while(remainingSize > 0)
4984 if(lastSuballocItem == m_Suballocations.cend())
4988 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4990 *pSumFreeSize += lastSuballocItem->size;
4994 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4995 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4996 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4998 ++*itemsToMakeLostCount;
4999 *pSumItemSize += lastSuballocItem->size;
5006 remainingSize = (lastSuballocItem->size < remainingSize) ?
5007 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following suballocations too; a
// conflicting successor on the same page must itself be lost-capable.
5013 if(bufferImageGranularity > 1)
5015 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5017 while(nextSuballocItem != m_Suballocations.cend())
5019 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5020 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5022 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5024 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5025 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5026 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5028 ++*itemsToMakeLostCount;
// ---- Branch 2: must fit entirely inside this single FREE suballocation. ----
5047 const VmaSuballocation& suballoc = *suballocItem;
5048 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5050 *pSumFreeSize = suballoc.size;
5053 if(suballoc.size < allocSize)
5059 *pOffset = suballoc.offset;
5062 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5064 *pOffset += VMA_DEBUG_MARGIN;
5068 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5069 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same look-back granularity adjustment as in branch 1.
5073 if(bufferImageGranularity > 1)
5075 bool bufferImageGranularityConflict =
false;
5076 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5077 while(prevSuballocItem != m_Suballocations.cbegin())
5080 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5081 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5083 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5085 bufferImageGranularityConflict =
true;
5093 if(bufferImageGranularityConflict)
5095 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
5100 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5103 VmaSuballocationList::const_iterator next = suballocItem;
5105 const VkDeviceSize requiredEndMargin =
5106 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free range.
5109 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Look-ahead granularity check: a conflicting successor on the same page
// makes this placement invalid (rejection body lost in extraction).
5116 if(bufferImageGranularity > 1)
5118 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5120 while(nextSuballocItem != m_Suballocations.cend())
5122 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5123 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5125 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a FREE suballocation with its (also FREE) successor: the successor's
// size is folded into `item` and the successor node erased. Also expected to
// decrement m_FreeCount (that line was dropped by extraction — TODO confirm).
5144 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5146 VMA_ASSERT(item != m_Suballocations.end());
5147 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
// nextItem is presumably advanced by one (++nextItem lost in extraction).
5149 VmaSuballocationList::iterator nextItem = item;
5151 VMA_ASSERT(nextItem != m_Suballocations.end());
5152 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5154 item->size += nextItem->size;
5156 m_Suballocations.erase(nextItem);
// Turns a used suballocation into FREE space, merges it with any adjacent
// FREE neighbors, keeps m_SumFreeSize and the size-sorted index consistent,
// and returns an iterator to the resulting (possibly merged) free range.
5159 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5162 VmaSuballocation& suballoc = *suballocItem;
5163 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5164 suballoc.hAllocation = VK_NULL_HANDLE;
5168 m_SumFreeSize += suballoc.size;
// Decide whether the previous/next list node is FREE and thus mergeable.
5171 bool mergeWithNext =
false;
5172 bool mergeWithPrev =
false;
// nextItem presumably advanced by one (++nextItem lost in extraction).
5174 VmaSuballocationList::iterator nextItem = suballocItem;
5176 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5178 mergeWithNext =
true;
5181 VmaSuballocationList::iterator prevItem = suballocItem;
5182 if(suballocItem != m_Suballocations.begin())
5185 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5187 mergeWithPrev =
true;
// Neighbors must leave the size index before their size changes via merge,
// then the surviving node is re-registered with its new size.
5193 UnregisterFreeSuballocation(nextItem);
5194 MergeFreeWithNext(suballocItem);
5199 UnregisterFreeSuballocation(prevItem);
5200 MergeFreeWithNext(prevItem);
5201 RegisterFreeSuballocation(prevItem);
5206 RegisterFreeSuballocation(suballocItem);
5207 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize (kept sorted
// by size) — but only if it meets the minimum registration threshold; smaller
// free ranges are tracked in the list only.
5211 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5213 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5214 VMA_ASSERT(item->size > 0);
5218 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5220 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5222 if(m_FreeSuballocationsBySize.empty())
5224 m_FreeSuballocationsBySize.push_back(item);
// Non-empty vector: binary-search insertion keeps the size ordering.
5228 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from the size-sorted index. Binary-searches to
// the first entry of equal size, then scans the run of equal-size entries for
// the exact iterator; asserts if it is missing.
5236 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5238 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5239 VMA_ASSERT(item->size > 0);
5243 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the threshold were never registered — nothing to remove.
5245 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5247 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5248 m_FreeSuballocationsBySize.data(),
5249 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5251 VmaSuballocationItemSizeLess());
5252 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5253 index < m_FreeSuballocationsBySize.size();
5256 if(m_FreeSuballocationsBySize[index] == item)
5258 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Once the size run ends without a match, the item cannot be present.
5261 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5263 VMA_ASSERT(0 &&
"Not found.");
// Constructor: leaves the block in an uninitialized sentinel state (no memory
// handle, no mapping); real setup happens in Init(). The metadata object only
// gets the allocator for its allocation callbacks here.
5272 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5273 m_MemoryTypeIndex(UINT32_MAX),
5274 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
5275 m_hMemory(VK_NULL_HANDLE),
5276 m_PersistentMap(false),
5277 m_pMappedData(VMA_NULL),
5278 m_Metadata(hAllocator)
// Takes ownership of an already-allocated VkDeviceMemory and initializes the
// block's metadata for it. Must be called exactly once on a fresh block
// (asserted via m_hMemory == VK_NULL_HANDLE). Trailing parameters
// (persistentMap, pMappedData) were dropped from the signature by extraction.
5282 void VmaDeviceMemoryBlock::Init(
5283 uint32_t newMemoryTypeIndex,
5284 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
5285 VkDeviceMemory newMemory,
5286 VkDeviceSize newSize,
5290 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5292 m_MemoryTypeIndex = newMemoryTypeIndex;
5293 m_BlockVectorType = newBlockVectorType;
5294 m_hMemory = newMemory;
5295 m_PersistentMap = persistentMap;
5296 m_pMappedData = pMappedData;
// Metadata starts as one big FREE suballocation of newSize.
5298 m_Metadata.Init(newSize);
// Releases the block's VkDeviceMemory: unmaps first if persistently mapped,
// then frees via the allocator. Asserts the block is empty — destroying a
// block with live allocations is a caller bug.
5301 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5305 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5307 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5308 if(m_pMappedData != VMA_NULL)
5310 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
5311 m_pMappedData = VMA_NULL;
5314 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5315 m_hMemory = VK_NULL_HANDLE;
// Sanity check: the block must own real memory of non-zero size; everything
// else is delegated to the metadata's own Validate(). (The early `return
// false` body was dropped by extraction.)
5318 bool VmaDeviceMemoryBlock::Validate()
const 5320 if((m_hMemory == VK_NULL_HANDLE) ||
5321 (m_Metadata.GetSize() == 0))
5326 return m_Metadata.Validate();
// NOTE(review): fragments of two static stat helpers — the zero-init line
// presumably belongs to a VmaInitStatInfo(VmaStatInfo& outInfo) whose
// signature was lost in extraction, and VmaPostprocessCalcStatInfo's body
// (averaging accumulated values) is missing entirely. TODO confirm upstream.
5331 memset(&outInfo, 0,
sizeof(outInfo));
5350 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the VmaPoolCreateInfo fields into the pool's
// embedded VmaBlockVector (member name lost in extraction), choosing the
// MAPPED vector type when the pool is persistently mapped.
5358 VmaPool_T::VmaPool_T(
5359 VmaAllocator hAllocator,
5363 createInfo.memoryTypeIndex,
// Condition selecting MAPPED vs UNMAPPED (presumably the persistent-map
// create flag) was dropped by extraction — TODO confirm.
5365 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5366 createInfo.blockSize,
5367 createInfo.minBlockCount,
5368 createInfo.maxBlockCount,
5370 createInfo.frameInUseCount,
// ~VmaPool_T body was dropped by extraction; immediately after it begins the
// VmaBlockVector constructor — a straight member-initializer list copying the
// configuration, starting with an empty block array and no defragmentator.
5375 VmaPool_T::~VmaPool_T()
5379 #if VMA_STATS_STRING_ENABLED 5381 #endif // #if VMA_STATS_STRING_ENABLED 5383 VmaBlockVector::VmaBlockVector(
5384 VmaAllocator hAllocator,
5385 uint32_t memoryTypeIndex,
5386 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5387 VkDeviceSize preferredBlockSize,
5388 size_t minBlockCount,
5389 size_t maxBlockCount,
5390 VkDeviceSize bufferImageGranularity,
5391 uint32_t frameInUseCount,
5392 bool isCustomPool) :
5393 m_hAllocator(hAllocator),
5394 m_MemoryTypeIndex(memoryTypeIndex),
5395 m_BlockVectorType(blockVectorType),
5396 m_PreferredBlockSize(preferredBlockSize),
5397 m_MinBlockCount(minBlockCount),
5398 m_MaxBlockCount(maxBlockCount),
5399 m_BufferImageGranularity(bufferImageGranularity),
5400 m_FrameInUseCount(frameInUseCount),
5401 m_IsCustomPool(isCustomPool),
// Block vector uses the allocator's callbacks via the STL-adapter allocator.
5402 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5403 m_HasEmptyBlock(false),
5404 m_pDefragmentator(VMA_NULL)
// Destructor: the defragmentator must already have been destroyed; frees the
// device memory of every owned block, then deletes the block objects.
5408 VmaBlockVector::~VmaBlockVector()
5410 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
// Reverse iteration — safe idiom for destroying while indexing.
5412 for(
size_t i = m_Blocks.size(); i--; )
5414 m_Blocks[i]->Destroy(m_hAllocator);
5415 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, bailing out with
// the failing VkResult on the first error (return lines lost in extraction).
5419 VkResult VmaBlockVector::CreateMinBlocks()
5421 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5423 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5424 if(res != VK_SUCCESS)
// Aggregates pool statistics over all blocks (under the vector's mutex when
// the allocator uses one); each block's metadata adds its own numbers.
// *pStats zero-initialization lines were dropped by extraction.
5432 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5440 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5442 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5444 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5446 VMA_HEAVY_ASSERT(pBlock->Validate());
5447 pBlock->m_Metadata.AddPoolStats(*pStats);
// Main allocation entry for a block vector. Strategy, in order:
//  1) Try every existing block via CreateAllocationRequest (no lost allocs).
//  2) If allowed and under m_MaxBlockCount, create a new block — retrying with
//     smaller sizes on failure when this is a default (non-custom) pool.
//  3) If the caller allows making other allocations lost, repeatedly pick the
//     cheapest lose-others request across blocks, commit it, and retry up to
//     VMA_ALLOCATION_TRY_COUNT times (another thread may race the freed space).
// Returns VK_SUCCESS with *pAllocation, or an error code.
// NOTE(review): extraction dropped braces, several returns and argument lists;
// numeric prefixes are original-file line numbers.
5451 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5453 VkResult VmaBlockVector::Allocate(
5454 VmaPool hCurrentPool,
5455 uint32_t currentFrameIndex,
5456 const VkMemoryRequirements& vkMemReq,
5458 VmaSuballocationType suballocType,
5459 VmaAllocation* pAllocation)
// Guard: a persistent-map allocation flag must match the pool's mapping mode.
5462 if(createInfo.
pool != VK_NULL_HANDLE &&
5465 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5466 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5469 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Pass 1: place into an existing block without losing anything.
5473 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5475 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5476 VMA_ASSERT(pCurrBlock);
5477 VmaAllocationRequest currRequest = {};
5478 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5481 m_BufferImageGranularity,
5489 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Block is about to gain an allocation, so it is no longer the empty one.
5492 if(pCurrBlock->m_Metadata.IsEmpty())
5494 m_HasEmptyBlock =
false;
5497 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5498 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5499 (*pAllocation)->InitBlockAllocation(
5508 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5509 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Pass 2: create a new block if the block-count cap allows it.
5514 const bool canCreateNewBlock =
5516 (m_Blocks.size() < m_MaxBlockCount);
5519 if(canCreateNewBlock)
5522 VkDeviceSize blockSize = m_PreferredBlockSize;
5523 size_t newBlockIndex = 0;
5524 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default pools retry with progressively halved block sizes (the halving
// statements were dropped by extraction — TODO confirm) as long as the
// shrunken block still fits the request.
5527 if(res < 0 && m_IsCustomPool ==
false)
5531 if(blockSize >= vkMemReq.size)
5533 res = CreateBlock(blockSize, &newBlockIndex);
5538 if(blockSize >= vkMemReq.size)
5540 res = CreateBlock(blockSize, &newBlockIndex);
5545 if(res == VK_SUCCESS)
5547 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5548 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
// Fresh block: the first-allocation fast path needs no searching.
5551 VmaAllocationRequest allocRequest;
5552 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5553 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5554 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5555 (*pAllocation)->InitBlockAllocation(
5558 allocRequest.offset,
5564 VMA_HEAVY_ASSERT(pBlock->Validate());
5565 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Pass 3: sacrifice lost-capable allocations, cheapest candidate first.
5574 if(canMakeOtherLost)
5576 uint32_t tryIndex = 0;
5577 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5579 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5580 VmaAllocationRequest bestRequest = {};
5581 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5585 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5587 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5588 VMA_ASSERT(pCurrBlock);
5589 VmaAllocationRequest currRequest = {};
5590 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5593 m_BufferImageGranularity,
5600 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5601 if(pBestRequestBlock == VMA_NULL ||
5602 currRequestCost < bestRequestCost)
5604 pBestRequestBlock = pCurrBlock;
5605 bestRequest = currRequest;
5606 bestRequestCost = currRequestCost;
// Cost 0 means nothing is sacrificed — cannot do better, stop searching.
5608 if(bestRequestCost == 0)
5616 if(pBestRequestBlock != VMA_NULL)
// Committing may fail if the situation changed; then loop and retry.
5618 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5624 if(pBestRequestBlock->m_Metadata.IsEmpty())
5626 m_HasEmptyBlock =
false;
5629 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5630 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5631 (*pAllocation)->InitBlockAllocation(
5640 VMA_HEAVY_ASSERT(pBlock->Validate());
5641 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5655 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5657 return VK_ERROR_TOO_MANY_OBJECTS;
5661 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back to its block. Keeps at most one empty block alive
// (m_HasEmptyBlock) as a cache: if this free empties a block while another
// empty block already exists (and we're above m_MinBlockCount), the block is
// scheduled for deletion; otherwise it becomes the cached empty block.
// The actual Vulkan memory release happens outside the mutex.
5664 void VmaBlockVector::Free(
5665 VmaAllocation hAllocation)
5667 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope block for the lock presumably ended before the deletion below
// (braces lost in extraction).
5671 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5673 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5675 pBlock->m_Metadata.Free(hAllocation);
5676 VMA_HEAVY_ASSERT(pBlock->Validate());
5678 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5681 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block cached — this one can go.
5684 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5686 pBlockToDelete = pBlock;
5692 m_HasEmptyBlock =
true;
// This block didn't empty, but if the last block is an empty one we can
// now retire it instead.
5697 else if(m_HasEmptyBlock)
5699 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
5700 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
5702 pBlockToDelete = pLastBlock;
5703 m_Blocks.pop_back();
5704 m_HasEmptyBlock =
false;
5708 IncrementallySortBlocks();
// Deletion of VkDeviceMemory is deliberately done after releasing the lock.
5713 if(pBlockToDelete != VMA_NULL)
5715 VMA_DEBUG_LOG(
" Deleted empty allocation");
5716 pBlockToDelete->Destroy(m_hAllocator);
5717 vma_delete(m_hAllocator, pBlockToDelete);
// Removes a block pointer from m_Blocks by linear search (does not destroy
// the block; ownership transfer is the caller's concern).
5721 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5723 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5725 if(m_Blocks[blockIndex] == pBlock)
5727 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending sum of free space, so
// allocation searches hit fuller blocks first. Called after each free; full
// sorting amortizes across calls.
5734 void VmaBlockVector::IncrementallySortBlocks()
5737 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5739 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5741 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates one VkDeviceMemory of blockSize for this vector's memory type,
// persistently maps it when the vector is of MAPPED type (unless mapping is
// globally suspended), wraps it in a VmaDeviceMemoryBlock appended to
// m_Blocks, and optionally reports the new index.
5747 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5749 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5750 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5751 allocInfo.allocationSize = blockSize;
5752 VkDeviceMemory mem = VK_NULL_HANDLE;
5753 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Map immediately for persistently-mapped vectors, but only while persistent
// mapping is not suspended (counter == 0).
5762 void* pMappedData = VMA_NULL;
5763 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5764 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5766 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5767 m_hAllocator->m_hDevice,
// On map failure the freshly allocated memory is released again.
5775 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5776 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5782 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5785 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5787 allocInfo.allocationSize,
5791 m_Blocks.push_back(pBlock);
5792 if(pNewBlockIndex != VMA_NULL)
5794 *pNewBlockIndex = m_Blocks.size() - 1;
// JSON dump of this block vector: configuration summary (custom pools print
// MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount; default pools print
// PreferredBlockSize — the branch condition was lost in extraction), then a
// per-block detailed map. Taken under the vector mutex.
5800 #if VMA_STATS_STRING_ENABLED 5802 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5804 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5810 json.WriteString(
"MemoryTypeIndex");
5811 json.WriteNumber(m_MemoryTypeIndex);
5813 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5815 json.WriteString(
"Mapped");
5816 json.WriteBool(
true);
5819 json.WriteString(
"BlockSize");
5820 json.WriteNumber(m_PreferredBlockSize);
5822 json.WriteString(
"BlockCount");
5823 json.BeginObject(
true);
// Min/Max are printed only when they constrain anything.
5824 if(m_MinBlockCount > 0)
5826 json.WriteString(
"Min");
5827 json.WriteNumber(m_MinBlockCount);
5829 if(m_MaxBlockCount < SIZE_MAX)
5831 json.WriteString(
"Max");
5832 json.WriteNumber(m_MaxBlockCount);
5834 json.WriteString(
"Cur");
5835 json.WriteNumber(m_Blocks.size());
5838 if(m_FrameInUseCount > 0)
5840 json.WriteString(
"FrameInUseCount");
5841 json.WriteNumber(m_FrameInUseCount);
5846 json.WriteString(
"PreferredBlockSize");
5847 json.WriteNumber(m_PreferredBlockSize);
5850 json.WriteString(
"Blocks");
5852 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5854 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Temporarily unmaps every currently-mapped block in this vector (used when
// the application suspends persistent mapping, e.g. around D3D-style
// present). Counterpart of MapPersistentlyMappedMemory below.
5861 #endif // #if VMA_STATS_STRING_ENABLED 5863 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5865 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5867 for(
size_t i = m_Blocks.size(); i--; )
5869 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5870 if(pBlock->m_pMappedData != VMA_NULL)
// Only persistently-mapped blocks should ever have a mapping here.
5872 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5873 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5874 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every persistently-mapped block after a suspension. Continues past
// individual failures but remembers the last failing VkResult to return
// (the final `return finalResult;` was dropped by extraction).
5879 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5881 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5883 VkResult finalResult = VK_SUCCESS;
5884 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5886 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5887 if(pBlock->m_PersistentMap)
// Must currently be unmapped, otherwise Unmap/Map calls are unbalanced.
5889 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5890 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5891 m_hAllocator->m_hDevice,
5896 &pBlock->m_pMappedData);
5897 if(localResult != VK_SUCCESS)
5899 finalResult = localResult;
// Lazily creates (once) and returns this vector's defragmentator; subsequent
// calls return the existing instance regardless of the frame index passed.
5906 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5907 VmaAllocator hAllocator,
5908 uint32_t currentFrameIndex)
5910 if(m_pDefragmentator == VMA_NULL)
5912 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5918 return m_pDefragmentator;
// Runs the defragmentator within the given move budgets, accumulates the
// bytes/allocations moved into *pDefragmentationStats and decrements the
// caller's budgets, then destroys blocks that defragmentation emptied
// (keeping m_MinBlockCount and recording at most one remaining empty block).
// Stats-parameter name in the signature was dropped by extraction.
5921 VkResult VmaBlockVector::Defragment(
5923 VkDeviceSize& maxBytesToMove,
5924 uint32_t& maxAllocationsToMove)
5926 if(m_pDefragmentator == VMA_NULL)
5931 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5934 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5937 if(pDefragmentationStats != VMA_NULL)
5939 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5940 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the budgets it was given.
5943 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5944 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free emptied blocks, scanning backwards so removal by index stays valid.
5950 m_HasEmptyBlock =
false;
5951 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5953 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5954 if(pBlock->m_Metadata.IsEmpty())
5956 if(m_Blocks.size() > m_MinBlockCount)
5958 if(pDefragmentationStats != VMA_NULL)
5961 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
5964 VmaVectorRemove(m_Blocks, blockIndex);
5965 pBlock->Destroy(m_hAllocator);
5966 vma_delete(m_hAllocator, pBlock);
// At or below the minimum count: keep the empty block and remember it.
5970 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator, if any, and resets the pointer.
5978 void VmaBlockVector::DestroyDefragmentator()
5980 if(m_pDefragmentator != VMA_NULL)
5982 vma_delete(m_hAllocator, m_pDefragmentator);
5983 m_pDefragmentator = VMA_NULL;
// Marks lost every lost-capable allocation in every block of this pool's
// vector. The count returned by each block was presumably accumulated into
// *pLostAllocationCount — that statement was dropped by extraction.
5987 void VmaBlockVector::MakePoolAllocationsLost(
5988 uint32_t currentFrameIndex,
5989 size_t* pLostAllocationCount)
5991 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5993 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5995 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5997 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Adds this vector's per-block statistics into the global VmaStats structure,
// under three views: grand total, per memory type, and per memory heap.
// (allocationStatInfo's declaration line was dropped by extraction.)
6001 void VmaBlockVector::AddStats(
VmaStats* pStats)
6003 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6004 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6006 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6008 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6010 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6012 VMA_HEAVY_ASSERT(pBlock->Validate());
6014 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6015 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6016 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6017 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and records the
// frame index at which defragmentation was requested. The containers use
// the allocator's user-supplied allocation callbacks.
// NOTE(review): the initializer for m_BytesMoved (original line 6031) is
// elided in this view; presumably it is zero-initialized like
// m_AllocationsMoved — confirm against the full source.
6024 VmaDefragmentator::VmaDefragmentator(
6025 VmaAllocator hAllocator,
6026 VmaBlockVector* pBlockVector,
6027 uint32_t currentFrameIndex) :
6028 m_hAllocator(hAllocator),
6029 m_pBlockVector(pBlockVector),
6030 m_CurrentFrameIndex(currentFrameIndex),
6032 m_AllocationsMoved(0),
6033 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6034 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6038 VmaDefragmentator::~VmaDefragmentator()
6040 for(
size_t i = m_Blocks.size(); i--; )
6042 vma_delete(m_hAllocator, m_Blocks[i]);
6046 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6048 AllocationInfo allocInfo;
6049 allocInfo.m_hAllocation = hAlloc;
6050 allocInfo.m_pChanged = pChanged;
6051 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to this block's memory in *ppMappedData, mapping it
// on demand. Three cases, in order: (1) already mapped for defragmentation
// — reuse that pointer; (2) block is persistently mapped — reuse the
// block's own mapping; (3) otherwise map the whole VkDeviceMemory now via
// the allocator's vkMapMemory function pointer and cache the result in
// m_pMappedDataForDefragmentation.
// NOTE(review): the early `return VK_SUCCESS;` statements, the offset/size/
// flags arguments to vkMapMemory (original lines 6075-6077) and the final
// `return res;` are elided in this view.
6054 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6057 if(m_pMappedDataForDefragmentation)
6059 *ppMappedData = m_pMappedDataForDefragmentation;
6064 if(m_pBlock->m_PersistentMap)
6066 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
6067 *ppMappedData = m_pBlock->m_pMappedData;
6072 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6073 hAllocator->m_hDevice,
6074 m_pBlock->m_hMemory,
6078 &m_pMappedDataForDefragmentation);
6079 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes a mapping created by EnsureMapping(): if this BlockInfo mapped the
// block's memory itself (case 3 above), unmap it. Mappings borrowed from a
// persistently mapped block are intentionally left alone.
// NOTE(review): no reset of m_pMappedDataForDefragmentation is visible
// here — confirm whether the pointer is cleared after unmapping in the
// full source.
6083 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6085 if(m_pMappedDataForDefragmentation != VMA_NULL)
6087 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One pass of the defragmentation algorithm: repeatedly takes the current
// source allocation (walking blocks from the back, allocations from the
// end) and tries to re-place it into an earlier block (dstBlockIndex <=
// srcBlockIndex) when MoveMakesSense() approves. Respects the
// maxBytesToMove / maxAllocationsToMove budgets, returning VK_INCOMPLETE
// when a budget would be exceeded. Moved data is copied CPU-side via the
// mapped pointers obtained from EnsureMapping().
// NOTE(review): numerous control-flow lines (loop braces, `return`
// statements, goto/continue logic between the index-adjustment branches at
// the bottom) are elided in this view — the fragment below is not the
// complete function body.
6091 VkResult VmaDefragmentator::DefragmentRound(
6092 VkDeviceSize maxBytesToMove,
6093 uint32_t maxAllocationsToMove)
6095 if(m_Blocks.empty())
// Cursor over the source candidate: last block, last allocation.
6100 size_t srcBlockIndex = m_Blocks.size() - 1;
6101 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor past empty blocks / exhausted allocation lists.
6107 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6109 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6112 if(srcBlockIndex == 0)
6119 srcAllocIndex = SIZE_MAX;
6124 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6128 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6129 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6131 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6132 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6133 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6134 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every block up to and including the source block as a destination.
6137 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6139 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6140 VmaAllocationRequest dstAllocRequest;
6141 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6142 m_CurrentFrameIndex,
6143 m_pBlockVector->GetFrameInUseCount(),
6144 m_pBlockVector->GetBufferImageGranularity(),
6149 &dstAllocRequest) &&
6151 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6153 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round rather than exceed the caller's limits.
6156 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6157 (m_BytesMoved + size > maxBytesToMove))
6159 return VK_INCOMPLETE;
6162 void* pDstMappedData = VMA_NULL;
6163 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6164 if(res != VK_SUCCESS)
6169 void* pSrcMappedData = VMA_NULL;
6170 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6171 if(res != VK_SUCCESS)
// CPU-side copy of the allocation's bytes to its new offset.
6178 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6179 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6180 static_cast<size_t>(size));
// Commit: register in destination metadata, free from source metadata,
// and repoint the allocation handle at its new block/offset.
6182 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6183 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6185 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6187 if(allocInfo.m_pChanged != VMA_NULL)
6189 *allocInfo.m_pChanged = VK_TRUE;
6192 ++m_AllocationsMoved;
6193 m_BytesMoved += size;
6195 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Move the source cursor to the next candidate.
6203 if(srcAllocIndex > 0)
6209 if(srcBlockIndex > 0)
6212 srcAllocIndex = SIZE_MAX;
// Top-level driver for one defragmentation request on this block vector:
// (1) builds a BlockInfo entry per existing block, sorted by block pointer
// so registered allocations can be binned by binary search; (2) distributes
// the registered (non-lost) allocations into their owning BlockInfo;
// (3) precomputes per-block movability and sorts allocations by descending
// size, then orders blocks by move-destination preference; (4) runs up to
// two DefragmentRound() passes within the given budgets; (5) unmaps any
// memory the rounds mapped.
// NOTE(review): several braces/elided lines are missing from this view;
// "SortAllocationsBySizeDescecnding" spelling is as in the original.
6222 VkResult VmaDefragmentator::Defragment(
6223 VkDeviceSize maxBytesToMove,
6224 uint32_t maxAllocationsToMove)
6226 if(m_Allocations.empty())
6232 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6233 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6235 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6236 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6237 m_Blocks.push_back(pBlockInfo);
6241 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Bin each registered allocation into the BlockInfo of its owning block.
6244 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6246 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are simply dropped from consideration.
6248 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6250 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6251 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6252 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6254 (*it)->m_Allocations.push_back(allocInfo);
6262 m_Allocations.clear();
6264 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6266 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6267 pBlockInfo->CalcHasNonMovableAllocations();
6268 pBlockInfo->SortAllocationsBySizeDescecnding();
6272 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute at most 2 rounds, stopping early on VK_INCOMPLETE or error.
6275 VkResult result = VK_SUCCESS;
6276 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6278 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6282 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6284 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Decides whether relocating an allocation from (srcBlockIndex, srcOffset)
// to (dstBlockIndex, dstOffset) makes the memory layout better. The
// comparisons suggest the policy: prefer moves to a lower block index, and
// within the same block, to a lower offset.
// NOTE(review): the `return` statements of all three branches and the final
// fallthrough return are elided in this view — confirm the exact truth
// values against the full source before modifying.
6290 bool VmaDefragmentator::MoveMakesSense(
6291 size_t dstBlockIndex, VkDeviceSize dstOffset,
6292 size_t srcBlockIndex, VkDeviceSize srcOffset)
6294 if(dstBlockIndex < srcBlockIndex)
6298 if(dstBlockIndex > srcBlockIndex)
6302 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (signature and several initializer/argument
// lines elided in this view). Copies handles and callbacks from
// *pCreateInfo, zeroes all cached tables, fetches physical-device and
// memory properties through the (already imported) Vulkan function
// pointers, applies optional per-heap size limits, and creates one
// VmaBlockVector plus one dedicated-allocation list per
// (memory type, block vector type) pair.
6315 m_PhysicalDevice(pCreateInfo->physicalDevice),
6316 m_hDevice(pCreateInfo->device),
6317 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6318 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6319 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6320 m_UnmapPersistentlyMappedMemoryCounter(0),
6321 m_PreferredLargeHeapBlockSize(0),
6322 m_PreferredSmallHeapBlockSize(0),
6323 m_CurrentFrameIndex(0),
6324 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all cached state before querying the device.
6328 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6329 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6330 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6332 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6333 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE here means "no limit" for that heap.
6335 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6337 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6348 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6349 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided heap size limits, also clamping the reported
// heap sizes so block-size heuristics respect the limit.
6358 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6360 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6361 if(limit != VK_WHOLE_SIZE)
6363 m_HeapSizeLimit[heapIndex] = limit;
6364 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6366 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One block vector + dedicated-allocation list per memory type and
// per block-vector type (mapped/unmapped). Constructor arguments for
// VmaBlockVector are partially elided in this view.
6372 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6374 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6376 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6378 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6381 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6385 GetBufferImageGranularity(),
6390 m_pDedicatedAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6395 VmaAllocator_T::~VmaAllocator_T()
6397 VMA_ASSERT(m_Pools.empty());
6399 for(
size_t i = GetMemoryTypeCount(); i--; )
6401 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6403 vma_delete(
this, m_pDedicatedAllocations[i][j]);
6404 vma_delete(
this, m_pBlockVectors[i][j]);
// Fills m_VulkanFunctions: when VMA_STATIC_VULKAN_FUNCTIONS == 1 the
// statically linked Vulkan entry points are used as defaults; any non-null
// pointer in the optional pVulkanFunctions then overrides the default.
// Finally asserts that every required function pointer is set (the
// *2KHR variants only when KHR_dedicated_allocation is enabled).
// NOTE(review): several original lines are fused together below by the
// extraction (e.g. "#if ... 6412 ..."); content kept byte-identical.
6409 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6411 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6412 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6413 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6414 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6415 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6416 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6417 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6418 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6419 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6420 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6421 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6422 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6423 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6424 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6425 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// User-provided overrides win over the static defaults.
6428 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6430 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6431 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6433 if(pVulkanFunctions != VMA_NULL)
6435 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6436 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6437 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6438 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6439 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6440 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6441 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6442 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6443 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6444 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6445 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6446 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6447 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6448 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6449 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6450 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// After import, every required entry point must be non-null.
6453 #undef VMA_COPY_IF_NOT_NULL 6457 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6458 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6459 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6460 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6461 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6462 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6463 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6464 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6465 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6466 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6467 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6468 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6469 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6470 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6471 if(m_UseKhrDedicatedAllocation)
6473 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6474 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6478 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6480 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6481 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6482 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6483 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: first decides whether a
// dedicated VkDeviceMemory is preferable (forced by debug flag, requested
// by the caller, or the request exceeds half the preferred block size) and,
// if so and allowed, goes straight to AllocateDedicatedMemory(). Otherwise
// tries the block vector for this (type, mapped/unmapped) pair and falls
// back to a dedicated allocation if sub-allocation fails.
// NOTE(review): many lines are elided in this view, including the
// `createInfo`/`finalCreateInfo` setup, the flag checks around the early
// returns, and most arguments of the Allocate/AllocateDedicatedMemory
// calls — this fragment is not the complete function body.
6486 VkResult VmaAllocator_T::AllocateMemoryOfType(
6487 const VkMemoryRequirements& vkMemReq,
6488 bool dedicatedAllocation,
6489 VkBuffer dedicatedBuffer,
6490 VkImage dedicatedImage,
6492 uint32_t memTypeIndex,
6493 VmaSuballocationType suballocType,
6494 VmaAllocation* pAllocation)
6496 VMA_ASSERT(pAllocation != VMA_NULL);
6497 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6499 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6500 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6501 VMA_ASSERT(blockVector);
// Heuristic: very large requests go to their own VkDeviceMemory.
6505 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6506 bool preferDedicatedMemory =
6507 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6508 dedicatedAllocation ||
6510 vkMemReq.size > preferredBlockSize / 2;
6512 if(preferDedicatedMemory &&
6514 finalCreateInfo.
pool == VK_NULL_HANDLE)
6521 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6530 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6534 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector.
6547 VkResult res = blockVector->Allocate(
6549 m_CurrentFrameIndex.load(),
6554 if(res == VK_SUCCESS)
6562 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try a dedicated allocation.
6566 res = AllocateDedicatedMemory(
6571 finalCreateInfo.pUserData,
6575 if(res == VK_SUCCESS)
6578 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6584 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates a whole VkDeviceMemory for a single allocation. When
// KHR_dedicated_allocation is enabled, chains a
// VkMemoryDedicatedAllocateInfoKHR naming the buffer OR image the memory
// is dedicated to. Optionally maps the memory persistently (unless the
// library is currently inside Unmap/MapPersistentlyMappedMemory, tracked
// by m_UnmapPersistentlyMappedMemoryCounter), then registers the new
// VmaAllocation in the sorted per-type dedicated-allocation list.
// NOTE(review): lines elided in this view include the `size`, `map`, and
// `pUserData` parameters, error-return statements after the failure logs,
// and the vkMapMemory argument list — the `map` flag chooses the MAPPED
// vs UNMAPPED dedicated list below.
6591 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6593 VmaSuballocationType suballocType,
6594 uint32_t memTypeIndex,
6597 VkBuffer dedicatedBuffer,
6598 VkImage dedicatedImage,
6599 VmaAllocation* pAllocation)
6601 VMA_ASSERT(pAllocation);
6603 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6604 allocInfo.memoryTypeIndex = memTypeIndex;
6605 allocInfo.allocationSize = size;
6607 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6608 if(m_UseKhrDedicatedAllocation)
6610 if(dedicatedBuffer != VK_NULL_HANDLE)
6612 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
6613 dedicatedAllocInfo.buffer = dedicatedBuffer;
6614 allocInfo.pNext = &dedicatedAllocInfo;
6616 else if(dedicatedImage != VK_NULL_HANDLE)
6618 dedicatedAllocInfo.image = dedicatedImage;
6619 allocInfo.pNext = &dedicatedAllocInfo;
6624 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6625 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6628 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6632 void* pMappedData =
nullptr;
6635 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6637 res = (*m_VulkanFunctions.vkMapMemory)(
6646 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: give the freshly allocated memory back.
6647 FreeVulkanMemory(memTypeIndex, size, hMemory);
6653 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6654 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
// Register in the sorted dedicated-allocation list for this type.
6658 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6659 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6660 VMA_ASSERT(pDedicatedAllocations);
6661 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
6664 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also learn whether the driver
// requires/prefers a dedicated allocation; otherwise it falls back to the
// core vkGetBufferMemoryRequirements and reports false for both flags.
// NOTE(review): the `hBuffer` parameter line is elided in this view.
6669 void VmaAllocator_T::GetBufferMemoryRequirements(
6671 VkMemoryRequirements& memReq,
6672 bool& requiresDedicatedAllocation,
6673 bool& prefersDedicatedAllocation)
const 6675 if(m_UseKhrDedicatedAllocation)
6677 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
6678 memReqInfo.buffer = hBuffer;
6680 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6682 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6683 memReq2.pNext = &memDedicatedReq;
6685 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6687 memReq = memReq2.memoryRequirements;
6688 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6689 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6693 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
6694 requiresDedicatedAllocation =
false;
6695 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements(): uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// the dedicated-allocation extension is enabled, else the core query with
// both dedicated-allocation flags reported as false.
// NOTE(review): the `hImage` parameter line is elided in this view.
6699 void VmaAllocator_T::GetImageMemoryRequirements(
6701 VkMemoryRequirements& memReq,
6702 bool& requiresDedicatedAllocation,
6703 bool& prefersDedicatedAllocation)
const 6705 if(m_UseKhrDedicatedAllocation)
6707 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
6708 memReqInfo.image = hImage;
6710 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6712 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6713 memReq2.pNext = &memDedicatedReq;
6715 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6717 memReq = memReq2.memoryRequirements;
6718 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6719 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6723 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
6724 requiresDedicatedAllocation =
false;
6725 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates mutually exclusive create
// flags (dedicated + never-allocate, pool + dedicated, pool + required
// dedicated), routes pool allocations straight to the pool's block vector,
// and otherwise iterates candidate memory types from vkMemReq.memoryTypeBits
// — trying the best type first, then masking it out and retrying with the
// next best until success or exhaustion.
// NOTE(review): heavily elided view — the `createInfo` parameter line, the
// flag-test conditions guarding each VMA_ASSERT, the
// vmaFindMemoryTypeIndex calls that set `res`/`memTypeIndex`, and most
// arguments of the AllocateMemoryOfType calls are missing here.
6729 VkResult VmaAllocator_T::AllocateMemory(
6730 const VkMemoryRequirements& vkMemReq,
6731 bool requiresDedicatedAllocation,
6732 bool prefersDedicatedAllocation,
6733 VkBuffer dedicatedBuffer,
6734 VkImage dedicatedImage,
6736 VmaSuballocationType suballocType,
6737 VmaAllocation* pAllocation)
6742 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6743 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6745 if(requiresDedicatedAllocation)
6749 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
6750 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6752 if(createInfo.
pool != VK_NULL_HANDLE)
6754 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
6755 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6758 if((createInfo.
pool != VK_NULL_HANDLE) &&
6761 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
6762 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass the per-type search entirely.
6765 if(createInfo.
pool != VK_NULL_HANDLE)
6767 return createInfo.
pool->m_BlockVector.Allocate(
6769 m_CurrentFrameIndex.load(),
// Bitmask of acceptable memory types; cleared bit-by-bit on failure.
6778 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6779 uint32_t memTypeIndex = UINT32_MAX;
6781 if(res == VK_SUCCESS)
6783 res = AllocateMemoryOfType(
6785 requiresDedicatedAllocation || prefersDedicatedAllocation,
6793 if(res == VK_SUCCESS)
// This type failed — remove it from the candidate set and retry.
6803 memoryTypeBits &= ~(1u << memTypeIndex);
6806 if(res == VK_SUCCESS)
6808 res = AllocateMemoryOfType(
6810 requiresDedicatedAllocation || prefersDedicatedAllocation,
6818 if(res == VK_SUCCESS)
6828 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation handle. If the allocation still owns memory (it
// either cannot become lost, or has not actually been lost), the backing
// memory is returned to its origin: block allocations go back to the
// owning pool's block vector or the default per-type block vector;
// dedicated allocations are released via FreeDedicatedMemory(). The
// VmaAllocation_T object itself is always deleted at the end.
// NOTE(review): braces, the `default:` handling of the switch, and the
// closing of the outer `if` are elided in this view.
6839 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6841 VMA_ASSERT(allocation);
6843 if(allocation->CanBecomeLost() ==
false ||
6844 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6846 switch(allocation->GetType())
6848 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6850 VmaBlockVector* pBlockVector = VMA_NULL;
6851 VmaPool hPool = allocation->GetPool();
6852 if(hPool != VK_NULL_HANDLE)
6854 pBlockVector = &hPool->m_BlockVector;
6858 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6859 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6860 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6862 pBlockVector->Free(allocation);
6865 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
6866 FreeDedicatedMemory(allocation);
// The handle object is destroyed regardless of allocation type.
6873 vma_delete(
this, allocation);
// Builds a full statistics snapshot in *pStats: initializes the total and
// the per-type/per-heap entries, accumulates stats from every default
// block vector, every user pool, and every dedicated allocation, then
// post-processes all entries (averages etc.).
// NOTE(review): elided lines in this view include the InitStatInfo calls
// inside the first two loops and the declaration of `allocationStatInfo`
// used in the dedicated-allocation loop.
6876 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6879 InitStatInfo(pStats->
total);
6880 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6882 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one pair per memory type.
6886 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6888 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6889 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6891 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6892 VMA_ASSERT(pBlockVector);
6893 pBlockVector->AddStats(pStats);
// User-created pools.
6899 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6900 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6902 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, guarded per memory type.
6907 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6909 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6910 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6911 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6913 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
6914 VMA_ASSERT(pDedicatedAllocVector);
6915 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6918 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
6919 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6920 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6921 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Final post-processing for every entry.
6927 VmaPostprocessCalcStatInfo(pStats->
total);
6928 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6929 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6930 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6931 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (0x1002 == 4098 decimal), used to detect AMD GPUs
// for the persistent-mapping workaround below.
static const uint32_t VMA_VENDOR_ID_AMD = 0x1002;
// Temporarily unmaps all persistently mapped memory. Re-entrant via the
// counter: only the outermost call (counter transitioning 0 -> 1) does the
// work. Applied only on AMD GPUs (vendor-ID check) and only to memory
// types that are both HOST_VISIBLE and DEVICE_LOCAL, covering dedicated
// allocations, the default MAPPED block vectors, and all pool block
// vectors.
// NOTE(review): braces and some closing scopes are elided in this view.
6936 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6938 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6940 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6942 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6944 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6945 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6946 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
// Dedicated allocations of this type, newest first.
6950 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6951 AllocationVectorType* pDedicatedAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6952 for(
size_t dedicatedAllocIndex = pDedicatedAllocationsVector->size(); dedicatedAllocIndex--; )
6954 VmaAllocation hAlloc = (*pDedicatedAllocationsVector)[dedicatedAllocIndex];
6955 hAlloc->DedicatedAllocUnmapPersistentlyMappedMemory(
this);
// Default MAPPED block vector of this type.
6961 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6962 pBlockVector->UnmapPersistentlyMappedMemory();
// All user pools.
6969 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6970 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6972 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
// Reverses UnmapPersistentlyMappedMemory(): only the call that brings the
// counter back to 0 remaps. Mirrors the unmap path (AMD-only, HOST_VISIBLE
// + DEVICE_LOCAL types: pools, dedicated allocations, default MAPPED block
// vectors). The first mapping failure is remembered in finalResult but the
// loop keeps going so everything that can be remapped is remapped.
// NOTE(review): braces, the final `return` of finalResult, and some
// closing scopes are elided in this view.
6979 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6981 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6982 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6984 VkResult finalResult = VK_SUCCESS;
6985 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
// User pools first.
6989 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6990 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6992 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6996 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6998 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6999 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
7000 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
// Dedicated allocations of this type.
7004 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7005 AllocationVectorType* pAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
7006 for(
size_t dedicatedAllocIndex = 0, dedicatedAllocCount = pAllocationsVector->size(); dedicatedAllocIndex < dedicatedAllocCount; ++dedicatedAllocIndex)
7008 VmaAllocation hAlloc = (*pAllocationsVector)[dedicatedAllocIndex];
7009 hAlloc->DedicatedAllocMapPersistentlyMappedMemory(
this);
// Default MAPPED block vector of this type.
7015 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
7016 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
7017 if(localResult != VK_SUCCESS)
7019 finalResult = localResult;
// Public defragmentation entry point: registers each eligible allocation
// (block-owned, in a HOST_VISIBLE memory type, not lost) with a
// per-block-vector defragmentator, runs Defragment() on every default
// HOST_VISIBLE block vector and every pool within the optional
// pDefragmentationInfo budgets, then destroys all defragmentators.
// NOTE(review): the visible `memset(pAllocationsChanged, 0,
// sizeof(*pAllocationsChanged))` zeroes only ONE VkBool32, yet
// pAllocationsChanged is an array of allocationCount elements — upstream
// uses `allocationCount * sizeof(VkBool32)`. Likely an extraction defect
// or a real bug; confirm against the full source. Also note
// `maxBytesToMove = SIZE_MAX` for a VkDeviceSize — on 32-bit targets
// SIZE_MAX is smaller than VK_WHOLE_SIZE; verify intent.
7031 VkResult VmaAllocator_T::Defragment(
7032 VmaAllocation* pAllocations,
7033 size_t allocationCount,
7034 VkBool32* pAllocationsChanged,
7038 if(pAllocationsChanged != VMA_NULL)
7040 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7042 if(pDefragmentationStats != VMA_NULL)
7044 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
// Defragmentation is incompatible with temporarily unmapped memory.
7047 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
7049 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
7050 return VK_ERROR_MEMORY_MAP_FAILED;
7053 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7055 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7057 const size_t poolCount = m_Pools.size();
// Register each eligible allocation with its block vector's
// defragmentator (created on demand).
7060 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7062 VmaAllocation hAlloc = pAllocations[allocIndex];
7064 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7066 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7068 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7070 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7072 VmaBlockVector* pAllocBlockVector =
nullptr;
7074 const VmaPool hAllocPool = hAlloc->GetPool();
7076 if(hAllocPool != VK_NULL_HANDLE)
7078 pAllocBlockVector = &hAllocPool->GetBlockVector();
7083 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
7086 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7088 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7089 &pAllocationsChanged[allocIndex] : VMA_NULL;
7090 pDefragmentator->AddAllocation(hAlloc, pChanged);
7094 VkResult result = VK_SUCCESS;
// Budgets default to "unlimited" unless pDefragmentationInfo says else.
7098 VkDeviceSize maxBytesToMove = SIZE_MAX;
7099 uint32_t maxAllocationsToMove = UINT32_MAX;
7100 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation on every default HOST_VISIBLE block vector...
7107 for(uint32_t memTypeIndex = 0;
7108 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7112 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7114 for(uint32_t blockVectorType = 0;
7115 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
7118 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
7119 pDefragmentationStats,
7121 maxAllocationsToMove);
// ...and on every pool.
7127 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7129 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7130 pDefragmentationStats,
7132 maxAllocationsToMove);
// Cleanup: destroy all defragmentators, pools first, then defaults.
7138 for(
size_t poolIndex = poolCount; poolIndex--; )
7140 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7144 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7146 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7148 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
7150 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
// Fills *pAllocationInfo for an allocation. For allocations that can
// become lost, this doubles as a "touch": it compare-exchanges the
// last-use frame index up to the current frame in a retry loop. A lost
// allocation reports offset 0 (and presumably null memory — some field
// assignments are elided here); an already-touched allocation returns its
// data immediately; otherwise the CAS loop retries until it wins or
// observes a change. Non-lost-capable allocations take the simple path at
// the bottom.
// NOTE(review): several lines are elided in this view, including the
// loop construct around the CAS, the `return` statements, and some field
// assignments in the "lost" branch.
7158 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7160 if(hAllocation->CanBecomeLost())
7166 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7167 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7170 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7174 pAllocationInfo->
offset = 0;
7175 pAllocationInfo->
size = hAllocation->GetSize();
7177 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7180 else if(localLastUseFrameIndex == localCurrFrameIndex)
7182 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7183 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7184 pAllocationInfo->
offset = hAllocation->GetOffset();
7185 pAllocationInfo->
size = hAllocation->GetSize();
7186 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7187 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS "touch": publish the current frame as last-use frame.
7192 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7194 localLastUseFrameIndex = localCurrFrameIndex;
// Simple path for allocations that can never be lost.
7202 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7203 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7204 pAllocationInfo->
offset = hAllocation->GetOffset();
7205 pAllocationInfo->
size = hAllocation->GetSize();
7206 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7207 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: constructs a VmaPool_T from the (normalized)
// create info, pre-allocates its minimum block count, and on success
// registers the pool in the sorted m_Pools list under m_PoolsMutex. On
// CreateMinBlocks failure the partially built pool is deleted.
// NOTE(review): the lines building `newCreateInfo` from *pCreateInfo
// (original lines 7214-7225) and the error/success `return` statements are
// elided in this view.
7211 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7213 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7226 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7228 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7229 if(res != VK_SUCCESS)
7231 vma_delete(
this, *pPool);
7238 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7239 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted m_Pools list
// (asserting it was actually registered) and deletes the pool object.
// NOTE(review): braces are elided in this view; the blank original lines
// between 7251 and 7254 suggest the mutex lock lives in an inner scope
// that ends before vma_delete — confirm against the full source.
7245 void VmaAllocator_T::DestroyPool(VmaPool pool)
7249 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7250 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7251 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7254 vma_delete(
this, pool);
7257 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7259 pool->m_BlockVector.GetPoolStats(pPoolStats);
7262 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7264 m_CurrentFrameIndex.store(frameIndex);
// Forwards a "make allocations lost" request to the pool's block vector,
// passing the current frame index read atomically.
// NOTE(review): the `VmaPool hPool` parameter line (original line 7268) is
// elided in this view; it is the receiver of the forwarded call below.
7267 void VmaAllocator_T::MakePoolAllocationsLost(
7269 size_t* pLostAllocationCount)
7271 hPool->m_BlockVector.MakePoolAllocationsLost(
7272 m_CurrentFrameIndex.load(),
7273 pLostAllocationCount);
7276 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7278 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
7279 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with per-heap budget accounting: when the target
// heap has a size limit, the allocation is admitted only if it fits in the
// remaining budget (checked and debited under m_HeapSizeLimitMutex),
// otherwise VK_ERROR_OUT_OF_DEVICE_MEMORY is returned without calling the
// driver. On success the user's pfnAllocate device-memory callback (if
// installed) is notified.
// NOTE(review): the declaration of `res`, several braces, and the final
// `return res;` are elided in this view.
7282 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7284 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7287 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7289 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7290 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7292 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7293 if(res == VK_SUCCESS)
// Debit the heap budget only after the driver accepted.
7295 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Over budget: fail without touching the driver.
7300 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No heap limit: plain passthrough to the driver.
7305 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7308 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7310 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7316 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7318 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7320 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7323 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7325 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7326 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7328 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7329 m_HeapSizeLimit[heapIndex] += size;
7333 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7335 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7337 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7339 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7340 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][allocation->GetBlockVectorType()];
7341 VMA_ASSERT(pDedicatedAllocations);
7342 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7343 VMA_ASSERT(success);
7346 VkDeviceMemory hMemory = allocation->GetMemory();
7348 if(allocation->GetMappedData() != VMA_NULL)
7350 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7353 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7355 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
7358 #if VMA_STATS_STRING_ENABLED 7360 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7362 bool dedicatedAllocationsStarted =
false;
7363 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7365 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7366 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7368 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
7369 VMA_ASSERT(pDedicatedAllocVector);
7370 if(pDedicatedAllocVector->empty() ==
false)
7372 if(dedicatedAllocationsStarted ==
false)
7374 dedicatedAllocationsStarted =
true;
7375 json.WriteString(
"DedicatedAllocations");
7379 json.BeginString(
"Type ");
7380 json.ContinueString(memTypeIndex);
7381 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7383 json.ContinueString(
" Mapped");
7389 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7391 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7392 json.BeginObject(
true);
7394 json.WriteString(
"Size");
7395 json.WriteNumber(hAlloc->GetSize());
7397 json.WriteString(
"Type");
7398 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7407 if(dedicatedAllocationsStarted)
7413 bool allocationsStarted =
false;
7414 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7416 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7418 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7420 if(allocationsStarted ==
false)
7422 allocationsStarted =
true;
7423 json.WriteString(
"DefaultPools");
7427 json.BeginString(
"Type ");
7428 json.ContinueString(memTypeIndex);
7429 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7431 json.ContinueString(
" Mapped");
7435 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7439 if(allocationsStarted)
7446 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7447 const size_t poolCount = m_Pools.size();
7450 json.WriteString(
"Pools");
7452 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7454 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7461 #endif // #if VMA_STATS_STRING_ENABLED 7463 static VkResult AllocateMemoryForImage(
7464 VmaAllocator allocator,
7467 VmaSuballocationType suballocType,
7468 VmaAllocation* pAllocation)
7470 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7472 VkMemoryRequirements vkMemReq = {};
7473 bool requiresDedicatedAllocation =
false;
7474 bool prefersDedicatedAllocation =
false;
7475 allocator->GetImageMemoryRequirements(image, vkMemReq,
7476 requiresDedicatedAllocation, prefersDedicatedAllocation);
7478 return allocator->AllocateMemory(
7480 requiresDedicatedAllocation,
7481 prefersDedicatedAllocation,
7484 *pAllocationCreateInfo,
7494 VmaAllocator* pAllocator)
7496 VMA_ASSERT(pCreateInfo && pAllocator);
7497 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7503 VmaAllocator allocator)
7505 if(allocator != VK_NULL_HANDLE)
7507 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7508 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7509 vma_delete(&allocationCallbacks, allocator);
7514 VmaAllocator allocator,
7515 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7517 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7518 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7522 VmaAllocator allocator,
7523 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7525 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7526 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7530 VmaAllocator allocator,
7531 uint32_t memoryTypeIndex,
7532 VkMemoryPropertyFlags* pFlags)
7534 VMA_ASSERT(allocator && pFlags);
7535 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7536 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7540 VmaAllocator allocator,
7541 uint32_t frameIndex)
7543 VMA_ASSERT(allocator);
7544 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7546 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7548 allocator->SetCurrentFrameIndex(frameIndex);
7552 VmaAllocator allocator,
7555 VMA_ASSERT(allocator && pStats);
7556 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7557 allocator->CalculateStats(pStats);
7560 #if VMA_STATS_STRING_ENABLED 7563 VmaAllocator allocator,
7564 char** ppStatsString,
7565 VkBool32 detailedMap)
7567 VMA_ASSERT(allocator && ppStatsString);
7568 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7570 VmaStringBuilder sb(allocator);
7572 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7576 allocator->CalculateStats(&stats);
7578 json.WriteString(
"Total");
7579 VmaPrintStatInfo(json, stats.
total);
7581 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7583 json.BeginString(
"Heap ");
7584 json.ContinueString(heapIndex);
7588 json.WriteString(
"Size");
7589 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7591 json.WriteString(
"Flags");
7592 json.BeginArray(
true);
7593 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7595 json.WriteString(
"DEVICE_LOCAL");
7601 json.WriteString(
"Stats");
7602 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7605 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7607 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7609 json.BeginString(
"Type ");
7610 json.ContinueString(typeIndex);
7615 json.WriteString(
"Flags");
7616 json.BeginArray(
true);
7617 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7618 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7620 json.WriteString(
"DEVICE_LOCAL");
7622 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7624 json.WriteString(
"HOST_VISIBLE");
7626 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7628 json.WriteString(
"HOST_COHERENT");
7630 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7632 json.WriteString(
"HOST_CACHED");
7634 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7636 json.WriteString(
"LAZILY_ALLOCATED");
7642 json.WriteString(
"Stats");
7643 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7652 if(detailedMap == VK_TRUE)
7654 allocator->PrintDetailedMap(json);
7660 const size_t len = sb.GetLength();
7661 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7664 memcpy(pChars, sb.GetData(), len);
7667 *ppStatsString = pChars;
7671 VmaAllocator allocator,
7674 if(pStatsString != VMA_NULL)
7676 VMA_ASSERT(allocator);
7677 size_t len = strlen(pStatsString);
7678 vma_delete_array(allocator, pStatsString, len + 1);
7682 #endif // #if VMA_STATS_STRING_ENABLED 7687 VmaAllocator allocator,
7688 uint32_t memoryTypeBits,
7690 uint32_t* pMemoryTypeIndex)
7692 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7693 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7694 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7696 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7698 if(preferredFlags == 0)
7700 preferredFlags = requiredFlags;
7703 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7706 switch(pAllocationCreateInfo->
usage)
7711 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7714 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7717 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7718 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7721 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7722 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7728 *pMemoryTypeIndex = UINT32_MAX;
7729 uint32_t minCost = UINT32_MAX;
7730 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7731 memTypeIndex < allocator->GetMemoryTypeCount();
7732 ++memTypeIndex, memTypeBit <<= 1)
7735 if((memTypeBit & memoryTypeBits) != 0)
7737 const VkMemoryPropertyFlags currFlags =
7738 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7740 if((requiredFlags & ~currFlags) == 0)
7743 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7745 if(currCost < minCost)
7747 *pMemoryTypeIndex = memTypeIndex;
7757 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7761 VmaAllocator allocator,
7765 VMA_ASSERT(allocator && pCreateInfo && pPool);
7767 VMA_DEBUG_LOG(
"vmaCreatePool");
7769 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7771 return allocator->CreatePool(pCreateInfo, pPool);
7775 VmaAllocator allocator,
7778 VMA_ASSERT(allocator && pool);
7780 VMA_DEBUG_LOG(
"vmaDestroyPool");
7782 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7784 allocator->DestroyPool(pool);
7788 VmaAllocator allocator,
7792 VMA_ASSERT(allocator && pool && pPoolStats);
7794 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7796 allocator->GetPoolStats(pool, pPoolStats);
7800 VmaAllocator allocator,
7802 size_t* pLostAllocationCount)
7804 VMA_ASSERT(allocator && pool);
7806 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7808 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7812 VmaAllocator allocator,
7813 const VkMemoryRequirements* pVkMemoryRequirements,
7815 VmaAllocation* pAllocation,
7818 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7820 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7822 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7824 VkResult result = allocator->AllocateMemory(
7825 *pVkMemoryRequirements,
7831 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7834 if(pAllocationInfo && result == VK_SUCCESS)
7836 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7843 VmaAllocator allocator,
7846 VmaAllocation* pAllocation,
7849 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7851 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7853 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7855 VkMemoryRequirements vkMemReq = {};
7856 bool requiresDedicatedAllocation =
false;
7857 bool prefersDedicatedAllocation =
false;
7858 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
7859 requiresDedicatedAllocation,
7860 prefersDedicatedAllocation);
7862 VkResult result = allocator->AllocateMemory(
7864 requiresDedicatedAllocation,
7865 prefersDedicatedAllocation,
7869 VMA_SUBALLOCATION_TYPE_BUFFER,
7872 if(pAllocationInfo && result == VK_SUCCESS)
7874 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7881 VmaAllocator allocator,
7884 VmaAllocation* pAllocation,
7887 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7889 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7891 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7893 VkResult result = AllocateMemoryForImage(
7897 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7900 if(pAllocationInfo && result == VK_SUCCESS)
7902 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7909 VmaAllocator allocator,
7910 VmaAllocation allocation)
7912 VMA_ASSERT(allocator && allocation);
7914 VMA_DEBUG_LOG(
"vmaFreeMemory");
7916 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7918 allocator->FreeMemory(allocation);
7922 VmaAllocator allocator,
7923 VmaAllocation allocation,
7926 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7928 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7930 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7934 VmaAllocator allocator,
7935 VmaAllocation allocation,
7938 VMA_ASSERT(allocator && allocation);
7940 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7942 allocation->SetUserData(pUserData);
7946 VmaAllocator allocator,
7947 VmaAllocation* pAllocation)
7949 VMA_ASSERT(allocator && pAllocation);
7951 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7953 allocator->CreateLostAllocation(pAllocation);
7957 VmaAllocator allocator,
7958 VmaAllocation allocation,
7961 VMA_ASSERT(allocator && allocation && ppData);
7963 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7965 return (*allocator->GetVulkanFunctions().vkMapMemory)(
7966 allocator->m_hDevice,
7967 allocation->GetMemory(),
7968 allocation->GetOffset(),
7969 allocation->GetSize(),
7975 VmaAllocator allocator,
7976 VmaAllocation allocation)
7978 VMA_ASSERT(allocator && allocation);
7980 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7982 (*allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, allocation->GetMemory());
7987 VMA_ASSERT(allocator);
7989 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7991 allocator->UnmapPersistentlyMappedMemory();
7996 VMA_ASSERT(allocator);
7998 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8000 return allocator->MapPersistentlyMappedMemory();
8004 VmaAllocator allocator,
8005 VmaAllocation* pAllocations,
8006 size_t allocationCount,
8007 VkBool32* pAllocationsChanged,
8011 VMA_ASSERT(allocator && pAllocations);
8013 VMA_DEBUG_LOG(
"vmaDefragment");
8015 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8017 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8021 VmaAllocator allocator,
8022 const VkBufferCreateInfo* pBufferCreateInfo,
8025 VmaAllocation* pAllocation,
8028 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8030 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8032 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8034 *pBuffer = VK_NULL_HANDLE;
8035 *pAllocation = VK_NULL_HANDLE;
8038 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8039 allocator->m_hDevice,
8041 allocator->GetAllocationCallbacks(),
8046 VkMemoryRequirements vkMemReq = {};
8047 bool requiresDedicatedAllocation =
false;
8048 bool prefersDedicatedAllocation =
false;
8049 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8050 requiresDedicatedAllocation, prefersDedicatedAllocation);
8053 res = allocator->AllocateMemory(
8055 requiresDedicatedAllocation,
8056 prefersDedicatedAllocation,
8059 *pAllocationCreateInfo,
8060 VMA_SUBALLOCATION_TYPE_BUFFER,
8065 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8066 allocator->m_hDevice,
8068 (*pAllocation)->GetMemory(),
8069 (*pAllocation)->GetOffset());
8073 if(pAllocationInfo != VMA_NULL)
8075 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8079 allocator->FreeMemory(*pAllocation);
8080 *pAllocation = VK_NULL_HANDLE;
8083 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8084 *pBuffer = VK_NULL_HANDLE;
8091 VmaAllocator allocator,
8093 VmaAllocation allocation)
8095 if(buffer != VK_NULL_HANDLE)
8097 VMA_ASSERT(allocator);
8099 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8101 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8103 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8105 allocator->FreeMemory(allocation);
8110 VmaAllocator allocator,
8111 const VkImageCreateInfo* pImageCreateInfo,
8114 VmaAllocation* pAllocation,
8117 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8119 VMA_DEBUG_LOG(
"vmaCreateImage");
8121 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8123 *pImage = VK_NULL_HANDLE;
8124 *pAllocation = VK_NULL_HANDLE;
8127 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8128 allocator->m_hDevice,
8130 allocator->GetAllocationCallbacks(),
8134 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8135 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8136 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8139 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8143 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8144 allocator->m_hDevice,
8146 (*pAllocation)->GetMemory(),
8147 (*pAllocation)->GetOffset());
8151 if(pAllocationInfo != VMA_NULL)
8153 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8157 allocator->FreeMemory(*pAllocation);
8158 *pAllocation = VK_NULL_HANDLE;
8161 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8162 *pImage = VK_NULL_HANDLE;
8169 VmaAllocator allocator,
8171 VmaAllocation allocation)
8173 if(image != VK_NULL_HANDLE)
8175 VMA_ASSERT(allocator);
8177 VMA_DEBUG_LOG(
"vmaDestroyImage");
8179 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8181 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8183 allocator->FreeMemory(allocation);
8187 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:568
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:785
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:593
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:578
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:759
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:572
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1044
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:590
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1198
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:914
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:968
Definition: vk_mem_alloc.h:823
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:561
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:856
Definition: vk_mem_alloc.h:769
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:605
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:652
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:587
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:602
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:773
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:717
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:575
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:716
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:583
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1202
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:622
VmaStatInfo total
Definition: vk_mem_alloc.h:726
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1210
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:839
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1193
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:576
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:497
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:596
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:922
Definition: vk_mem_alloc.h:916
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1054
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:573
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:858
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:938
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:974
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:559
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:925
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:754
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1188
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1206
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:765
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:574
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:722
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:503
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:524
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:529
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1208
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:850
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:984
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:569
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:705
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:933
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:516
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:830
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:718
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:520
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:928
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:768
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:845
Definition: vk_mem_alloc.h:836
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:708
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:571
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:946
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:608
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:977
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:834
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:863
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:640
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:724
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:717
Definition: vk_mem_alloc.h:896
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:580
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:518
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:579
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:960
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1065
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:599
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:717
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:714
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:965
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1049
Definition: vk_mem_alloc.h:832
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1204
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:567
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:582
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:712
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:757
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:918
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:710
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:577
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:581
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:796
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:762
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:1060
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:557
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:570
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1030
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:812
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:887
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:718
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:725
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:971
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:718
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1035