23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 410 #include <vulkan/vulkan.h> 412 VK_DEFINE_HANDLE(VmaAllocator)
416 VmaAllocator allocator,
418 VkDeviceMemory memory,
422 VmaAllocator allocator,
424 VkDeviceMemory memory,
576 VmaAllocator* pAllocator);
580 VmaAllocator allocator);
587 VmaAllocator allocator,
588 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
595 VmaAllocator allocator,
596 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
605 VmaAllocator allocator,
606 uint32_t memoryTypeIndex,
607 VkMemoryPropertyFlags* pFlags);
618 VmaAllocator allocator,
619 uint32_t frameIndex);
649 VmaAllocator allocator,
652 #define VMA_STATS_STRING_ENABLED 1 654 #if VMA_STATS_STRING_ENABLED 660 VmaAllocator allocator,
661 char** ppStatsString,
662 VkBool32 detailedMap);
665 VmaAllocator allocator,
668 #endif // #if VMA_STATS_STRING_ENABLED 670 VK_DEFINE_HANDLE(VmaPool)
799 VmaAllocator allocator,
800 uint32_t memoryTypeBits,
802 uint32_t* pMemoryTypeIndex);
912 VmaAllocator allocator,
919 VmaAllocator allocator,
929 VmaAllocator allocator,
940 VmaAllocator allocator,
942 size_t* pLostAllocationCount);
944 VK_DEFINE_HANDLE(VmaAllocation)
997 VmaAllocator allocator,
998 const VkMemoryRequirements* pVkMemoryRequirements,
1000 VmaAllocation* pAllocation,
1010 VmaAllocator allocator,
1013 VmaAllocation* pAllocation,
1018 VmaAllocator allocator,
1021 VmaAllocation* pAllocation,
1026 VmaAllocator allocator,
1027 VmaAllocation allocation);
1031 VmaAllocator allocator,
1032 VmaAllocation allocation,
1037 VmaAllocator allocator,
1038 VmaAllocation allocation,
1052 VmaAllocator allocator,
1053 VmaAllocation* pAllocation);
1064 VmaAllocator allocator,
1065 VmaAllocation allocation,
1069 VmaAllocator allocator,
1070 VmaAllocation allocation);
1201 VmaAllocator allocator,
1202 VmaAllocation* pAllocations,
1203 size_t allocationCount,
1204 VkBool32* pAllocationsChanged,
1227 VmaAllocator allocator,
1228 const VkBufferCreateInfo* pBufferCreateInfo,
1231 VmaAllocation* pAllocation,
1243 VmaAllocator allocator,
1245 VmaAllocation allocation);
1249 VmaAllocator allocator,
1250 const VkImageCreateInfo* pImageCreateInfo,
1253 VmaAllocation* pAllocation,
1265 VmaAllocator allocator,
1267 VmaAllocation allocation);
1273 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1276 #ifdef __INTELLISENSE__ 1277 #define VMA_IMPLEMENTATION 1280 #ifdef VMA_IMPLEMENTATION 1281 #undef VMA_IMPLEMENTATION 1303 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1304 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1316 #if VMA_USE_STL_CONTAINERS 1317 #define VMA_USE_STL_VECTOR 1 1318 #define VMA_USE_STL_UNORDERED_MAP 1 1319 #define VMA_USE_STL_LIST 1 1322 #if VMA_USE_STL_VECTOR 1326 #if VMA_USE_STL_UNORDERED_MAP 1327 #include <unordered_map> 1330 #if VMA_USE_STL_LIST 1339 #include <algorithm> 1343 #if !defined(_WIN32) 1350 #define VMA_ASSERT(expr) assert(expr) 1352 #define VMA_ASSERT(expr) 1358 #ifndef VMA_HEAVY_ASSERT 1360 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1362 #define VMA_HEAVY_ASSERT(expr) 1368 #define VMA_NULL nullptr 1371 #ifndef VMA_ALIGN_OF 1372 #define VMA_ALIGN_OF(type) (__alignof(type)) 1375 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1377 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1379 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1383 #ifndef VMA_SYSTEM_FREE 1385 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1387 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1392 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1396 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1400 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1404 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1407 #ifndef VMA_DEBUG_LOG 1408 #define VMA_DEBUG_LOG(format, ...) 1418 #if VMA_STATS_STRING_ENABLED 1419 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1421 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats `num` as decimal text into outStr. snprintf guarantees
// NUL-termination whenever strLen > 0; output is truncated if too long.
static inline void VmaUint64ToStr(
    char* outStr,
    size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats a pointer value into outStr using the platform's "%p"
// representation (implementation-defined text). NUL-terminated by snprintf.
static inline void VmaPtrToStr(
    char* outStr,
    size_t strLen,
    const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1439 void Lock() { m_Mutex.lock(); }
1440 void Unlock() { m_Mutex.unlock(); }
1444 #define VMA_MUTEX VmaMutex 1455 #ifndef VMA_ATOMIC_UINT32 1456 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1459 #ifndef VMA_BEST_FIT 1472 #define VMA_BEST_FIT (1) 1475 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1480 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1483 #ifndef VMA_DEBUG_ALIGNMENT 1488 #define VMA_DEBUG_ALIGNMENT (1) 1491 #ifndef VMA_DEBUG_MARGIN 1496 #define VMA_DEBUG_MARGIN (0) 1499 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1504 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1507 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1512 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1515 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1516 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1520 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1521 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1525 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1526 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1530 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1536 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1537 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count).
static inline uint32_t CountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit,
    // so the loop runs once per set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds `val` up to the nearest multiple of `align`.
// `align` must be non-zero and (val + align - 1) must not overflow T.
template <
    typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bucket = (val + align - T(1)) / align;
    return bucket * align;
}
// Integer division of x by y with rounding to nearest (half rounds up).
// `y` must be non-zero.
template <
    typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition step for VmaQuickSort. Takes the last element of
// [beg, end) as pivot, moves all elements ordered before it (per cmp) to the
// front, places the pivot at its final sorted position and returns it.
template<
    typename Iterator,
    typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator store = beg;
    for(Iterator cur = beg; cur < pivot; ++cur)
    {
        if(cmp(*cur, *pivot))
        {
            if(store != cur)
            {
                VMA_SWAP(*cur, *store);
            }
            ++store;
        }
    }
    if(store != pivot)
    {
        VMA_SWAP(*store, *pivot);
    }
    return store;
}
1590 template<
typename Iterator,
typename Compare>
1591 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1595 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1596 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1597 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1601 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1603 #endif // #ifndef VMA_SORT 1612 static inline bool VmaBlocksOnSamePage(
1613 VkDeviceSize resourceAOffset,
1614 VkDeviceSize resourceASize,
1615 VkDeviceSize resourceBOffset,
1616 VkDeviceSize pageSize)
1618 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1619 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1620 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1621 VkDeviceSize resourceBStart = resourceBOffset;
1622 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1623 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. Needed to decide whether two
// adjacent suballocations must be separated by bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

// Returns true if the two suballocation types could alias under the Vulkan
// bufferImageGranularity rule and therefore must not share a page.
// Symmetric in its arguments.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    // Normalize order so only the upper triangle of the type matrix
    // needs explicit handling below.
    if(suballocType1 > suballocType2)
    {
        const VmaSuballocationType tmp = suballocType1;
        suballocType1 = suballocType2;
        suballocType2 = tmp;
    }
    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        // Free space never conflicts with anything.
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        // Unknown contents: conservatively assume a conflict.
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        // Unreachable for valid enum values; be conservative.
        return true;
    }
}
1682 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1683 m_pMutex(useMutex ? &mutex : VMA_NULL)
1700 VMA_MUTEX* m_pMutex;
1703 #if VMA_DEBUG_GLOBAL_MUTEX 1704 static VMA_MUTEX gDebugGlobalMutex;
1705 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1707 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1711 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element that is NOT less than `key` (std::lower_bound semantics),
// or `end` when every element compares less. `cmp` is a "less" predicate.
template <
    typename IterT,
    typename KeyT,
    typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
    const KeyT &key, CmpT cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while(lo < hi)
    {
        const size_t mid = lo + (hi - lo) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;   // element < key: answer lies strictly after mid
        }
        else
        {
            hi = mid;       // element >= key: mid remains a candidate
        }
    }
    return beg + lo;
}
1744 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1746 if((pAllocationCallbacks != VMA_NULL) &&
1747 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1749 return (*pAllocationCallbacks->pfnAllocation)(
1750 pAllocationCallbacks->pUserData,
1753 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1757 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1761 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1763 if((pAllocationCallbacks != VMA_NULL) &&
1764 (pAllocationCallbacks->pfnFree != VMA_NULL))
1766 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1770 VMA_SYSTEM_FREE(ptr);
1774 template<
typename T>
1775 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1777 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1780 template<
typename T>
1781 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1783 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1786 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1788 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1790 template<
typename T>
1791 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1794 VmaFree(pAllocationCallbacks, ptr);
1797 template<
typename T>
1798 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1802 for(
size_t i = count; i--; )
1806 VmaFree(pAllocationCallbacks, ptr);
1811 template<
typename T>
1812 class VmaStlAllocator
1815 const VkAllocationCallbacks*
const m_pCallbacks;
1816 typedef T value_type;
1818 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1819 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1821 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1822 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1824 template<
typename U>
1825 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1827 return m_pCallbacks == rhs.m_pCallbacks;
1829 template<
typename U>
1830 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1832 return m_pCallbacks != rhs.m_pCallbacks;
1835 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1838 #if VMA_USE_STL_VECTOR 1840 #define VmaVector std::vector 1842 template<
typename T,
typename allocatorT>
1843 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1845 vec.insert(vec.begin() + index, item);
1848 template<
typename T,
typename allocatorT>
1849 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1851 vec.erase(vec.begin() + index);
1854 #else // #if VMA_USE_STL_VECTOR 1859 template<
typename T,
typename AllocatorT>
1863 typedef T value_type;
1865 VmaVector(
const AllocatorT& allocator) :
1866 m_Allocator(allocator),
1873 VmaVector(
size_t count,
const AllocatorT& allocator) :
1874 m_Allocator(allocator),
1875 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1881 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1882 m_Allocator(src.m_Allocator),
1883 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1884 m_Count(src.m_Count),
1885 m_Capacity(src.m_Count)
1889 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1895 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1898 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1902 resize(rhs.m_Count);
1905 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1911 bool empty()
const {
return m_Count == 0; }
1912 size_t size()
const {
return m_Count; }
1913 T* data() {
return m_pArray; }
1914 const T* data()
const {
return m_pArray; }
1916 T& operator[](
size_t index)
1918 VMA_HEAVY_ASSERT(index < m_Count);
1919 return m_pArray[index];
1921 const T& operator[](
size_t index)
const 1923 VMA_HEAVY_ASSERT(index < m_Count);
1924 return m_pArray[index];
1929 VMA_HEAVY_ASSERT(m_Count > 0);
1932 const T& front()
const 1934 VMA_HEAVY_ASSERT(m_Count > 0);
1939 VMA_HEAVY_ASSERT(m_Count > 0);
1940 return m_pArray[m_Count - 1];
1942 const T& back()
const 1944 VMA_HEAVY_ASSERT(m_Count > 0);
1945 return m_pArray[m_Count - 1];
1948 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1950 newCapacity = VMA_MAX(newCapacity, m_Count);
1952 if((newCapacity < m_Capacity) && !freeMemory)
1954 newCapacity = m_Capacity;
1957 if(newCapacity != m_Capacity)
1959 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1962 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1964 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1965 m_Capacity = newCapacity;
1966 m_pArray = newArray;
1970 void resize(
size_t newCount,
bool freeMemory =
false)
1972 size_t newCapacity = m_Capacity;
1973 if(newCount > m_Capacity)
1975 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1979 newCapacity = newCount;
1982 if(newCapacity != m_Capacity)
1984 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1985 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1986 if(elementsToCopy != 0)
1988 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1990 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1991 m_Capacity = newCapacity;
1992 m_pArray = newArray;
1998 void clear(
bool freeMemory =
false)
2000 resize(0, freeMemory);
2003 void insert(
size_t index,
const T& src)
2005 VMA_HEAVY_ASSERT(index <= m_Count);
2006 const size_t oldCount = size();
2007 resize(oldCount + 1);
2008 if(index < oldCount)
2010 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2012 m_pArray[index] = src;
2015 void remove(
size_t index)
2017 VMA_HEAVY_ASSERT(index < m_Count);
2018 const size_t oldCount = size();
2019 if(index < oldCount - 1)
2021 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2023 resize(oldCount - 1);
2026 void push_back(
const T& src)
2028 const size_t newIndex = size();
2029 resize(newIndex + 1);
2030 m_pArray[newIndex] = src;
2035 VMA_HEAVY_ASSERT(m_Count > 0);
2039 void push_front(
const T& src)
2046 VMA_HEAVY_ASSERT(m_Count > 0);
2050 typedef T* iterator;
2052 iterator begin() {
return m_pArray; }
2053 iterator end() {
return m_pArray + m_Count; }
2056 AllocatorT m_Allocator;
2062 template<
typename T,
typename allocatorT>
2063 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2065 vec.insert(index, item);
2068 template<
typename T,
typename allocatorT>
2069 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2074 #endif // #if VMA_USE_STL_VECTOR 2076 template<
typename CmpLess,
typename VectorT>
2077 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2079 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2081 vector.data() + vector.size(),
2083 CmpLess()) - vector.data();
2084 VmaVectorInsert(vector, indexToInsert, value);
2085 return indexToInsert;
2088 template<
typename CmpLess,
typename VectorT>
2089 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2092 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2097 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2099 size_t indexToRemove = it - vector.begin();
2100 VmaVectorRemove(vector, indexToRemove);
2106 template<
typename CmpLess,
typename VectorT>
2107 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2110 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2112 vector.data() + vector.size(),
2115 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2117 return it - vector.begin();
2121 return vector.size();
2133 template<
typename T>
2134 class VmaPoolAllocator
2137 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2138 ~VmaPoolAllocator();
2146 uint32_t NextFreeIndex;
2153 uint32_t FirstFreeIndex;
2156 const VkAllocationCallbacks* m_pAllocationCallbacks;
2157 size_t m_ItemsPerBlock;
2158 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2160 ItemBlock& CreateNewBlock();
2163 template<
typename T>
2164 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2165 m_pAllocationCallbacks(pAllocationCallbacks),
2166 m_ItemsPerBlock(itemsPerBlock),
2167 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2169 VMA_ASSERT(itemsPerBlock > 0);
2172 template<
typename T>
2173 VmaPoolAllocator<T>::~VmaPoolAllocator()
2178 template<
typename T>
2179 void VmaPoolAllocator<T>::Clear()
2181 for(
size_t i = m_ItemBlocks.size(); i--; )
2182 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2183 m_ItemBlocks.clear();
2186 template<
typename T>
2187 T* VmaPoolAllocator<T>::Alloc()
2189 for(
size_t i = m_ItemBlocks.size(); i--; )
2191 ItemBlock& block = m_ItemBlocks[i];
2193 if(block.FirstFreeIndex != UINT32_MAX)
2195 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2196 block.FirstFreeIndex = pItem->NextFreeIndex;
2197 return &pItem->Value;
2202 ItemBlock& newBlock = CreateNewBlock();
2203 Item*
const pItem = &newBlock.pItems[0];
2204 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2205 return &pItem->Value;
2208 template<
typename T>
2209 void VmaPoolAllocator<T>::Free(T* ptr)
2212 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2214 ItemBlock& block = m_ItemBlocks[i];
2218 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2221 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2223 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2224 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2225 block.FirstFreeIndex = index;
2229 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2232 template<
typename T>
2233 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2235 ItemBlock newBlock = {
2236 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2238 m_ItemBlocks.push_back(newBlock);
2241 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2242 newBlock.pItems[i].NextFreeIndex = i + 1;
2243 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2244 return m_ItemBlocks.back();
2250 #if VMA_USE_STL_LIST 2252 #define VmaList std::list 2254 #else // #if VMA_USE_STL_LIST 2256 template<
typename T>
2265 template<
typename T>
2269 typedef VmaListItem<T> ItemType;
2271 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2275 size_t GetCount()
const {
return m_Count; }
2276 bool IsEmpty()
const {
return m_Count == 0; }
2278 ItemType* Front() {
return m_pFront; }
2279 const ItemType* Front()
const {
return m_pFront; }
2280 ItemType* Back() {
return m_pBack; }
2281 const ItemType* Back()
const {
return m_pBack; }
2283 ItemType* PushBack();
2284 ItemType* PushFront();
2285 ItemType* PushBack(
const T& value);
2286 ItemType* PushFront(
const T& value);
2291 ItemType* InsertBefore(ItemType* pItem);
2293 ItemType* InsertAfter(ItemType* pItem);
2295 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2296 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2298 void Remove(ItemType* pItem);
2301 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2302 VmaPoolAllocator<ItemType> m_ItemAllocator;
2308 VmaRawList(
const VmaRawList<T>& src);
2309 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2312 template<
typename T>
2313 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2314 m_pAllocationCallbacks(pAllocationCallbacks),
2315 m_ItemAllocator(pAllocationCallbacks, 128),
2322 template<
typename T>
2323 VmaRawList<T>::~VmaRawList()
2329 template<
typename T>
2330 void VmaRawList<T>::Clear()
2332 if(IsEmpty() ==
false)
2334 ItemType* pItem = m_pBack;
2335 while(pItem != VMA_NULL)
2337 ItemType*
const pPrevItem = pItem->pPrev;
2338 m_ItemAllocator.Free(pItem);
2341 m_pFront = VMA_NULL;
2347 template<
typename T>
2348 VmaListItem<T>* VmaRawList<T>::PushBack()
2350 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2351 pNewItem->pNext = VMA_NULL;
2354 pNewItem->pPrev = VMA_NULL;
2355 m_pFront = pNewItem;
2361 pNewItem->pPrev = m_pBack;
2362 m_pBack->pNext = pNewItem;
2369 template<
typename T>
2370 VmaListItem<T>* VmaRawList<T>::PushFront()
2372 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2373 pNewItem->pPrev = VMA_NULL;
2376 pNewItem->pNext = VMA_NULL;
2377 m_pFront = pNewItem;
2383 pNewItem->pNext = m_pFront;
2384 m_pFront->pPrev = pNewItem;
2385 m_pFront = pNewItem;
2391 template<
typename T>
2392 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2394 ItemType*
const pNewItem = PushBack();
2395 pNewItem->Value = value;
2399 template<
typename T>
2400 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2402 ItemType*
const pNewItem = PushFront();
2403 pNewItem->Value = value;
2407 template<
typename T>
2408 void VmaRawList<T>::PopBack()
2410 VMA_HEAVY_ASSERT(m_Count > 0);
2411 ItemType*
const pBackItem = m_pBack;
2412 ItemType*
const pPrevItem = pBackItem->pPrev;
2413 if(pPrevItem != VMA_NULL)
2415 pPrevItem->pNext = VMA_NULL;
2417 m_pBack = pPrevItem;
2418 m_ItemAllocator.Free(pBackItem);
2422 template<
typename T>
2423 void VmaRawList<T>::PopFront()
2425 VMA_HEAVY_ASSERT(m_Count > 0);
2426 ItemType*
const pFrontItem = m_pFront;
2427 ItemType*
const pNextItem = pFrontItem->pNext;
2428 if(pNextItem != VMA_NULL)
2430 pNextItem->pPrev = VMA_NULL;
2432 m_pFront = pNextItem;
2433 m_ItemAllocator.Free(pFrontItem);
2437 template<
typename T>
2438 void VmaRawList<T>::Remove(ItemType* pItem)
2440 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2441 VMA_HEAVY_ASSERT(m_Count > 0);
2443 if(pItem->pPrev != VMA_NULL)
2445 pItem->pPrev->pNext = pItem->pNext;
2449 VMA_HEAVY_ASSERT(m_pFront == pItem);
2450 m_pFront = pItem->pNext;
2453 if(pItem->pNext != VMA_NULL)
2455 pItem->pNext->pPrev = pItem->pPrev;
2459 VMA_HEAVY_ASSERT(m_pBack == pItem);
2460 m_pBack = pItem->pPrev;
2463 m_ItemAllocator.Free(pItem);
2467 template<
typename T>
2468 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2470 if(pItem != VMA_NULL)
2472 ItemType*
const prevItem = pItem->pPrev;
2473 ItemType*
const newItem = m_ItemAllocator.Alloc();
2474 newItem->pPrev = prevItem;
2475 newItem->pNext = pItem;
2476 pItem->pPrev = newItem;
2477 if(prevItem != VMA_NULL)
2479 prevItem->pNext = newItem;
2483 VMA_HEAVY_ASSERT(m_pFront == pItem);
2493 template<
typename T>
2494 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2496 if(pItem != VMA_NULL)
2498 ItemType*
const nextItem = pItem->pNext;
2499 ItemType*
const newItem = m_ItemAllocator.Alloc();
2500 newItem->pNext = nextItem;
2501 newItem->pPrev = pItem;
2502 pItem->pNext = newItem;
2503 if(nextItem != VMA_NULL)
2505 nextItem->pPrev = newItem;
2509 VMA_HEAVY_ASSERT(m_pBack == pItem);
2519 template<
typename T>
2520 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2522 ItemType*
const newItem = InsertBefore(pItem);
2523 newItem->Value = value;
2527 template<
typename T>
2528 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2530 ItemType*
const newItem = InsertAfter(pItem);
2531 newItem->Value = value;
2535 template<
typename T,
typename AllocatorT>
2548 T& operator*()
const 2550 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2551 return m_pItem->Value;
2553 T* operator->()
const 2555 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2556 return &m_pItem->Value;
2559 iterator& operator++()
2561 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2562 m_pItem = m_pItem->pNext;
2565 iterator& operator--()
2567 if(m_pItem != VMA_NULL)
2569 m_pItem = m_pItem->pPrev;
2573 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2574 m_pItem = m_pList->Back();
2579 iterator operator++(
int)
2581 iterator result = *
this;
2585 iterator operator--(
int)
2587 iterator result = *
this;
2592 bool operator==(
const iterator& rhs)
const 2594 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2595 return m_pItem == rhs.m_pItem;
2597 bool operator!=(
const iterator& rhs)
const 2599 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2600 return m_pItem != rhs.m_pItem;
2604 VmaRawList<T>* m_pList;
2605 VmaListItem<T>* m_pItem;
2607 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2613 friend class VmaList<T, AllocatorT>;
2616 class const_iterator
2625 const_iterator(
const iterator& src) :
2626 m_pList(src.m_pList),
2627 m_pItem(src.m_pItem)
2631 const T& operator*()
const 2633 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2634 return m_pItem->Value;
2636 const T* operator->()
const 2638 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2639 return &m_pItem->Value;
2642 const_iterator& operator++()
2644 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2645 m_pItem = m_pItem->pNext;
2648 const_iterator& operator--()
2650 if(m_pItem != VMA_NULL)
2652 m_pItem = m_pItem->pPrev;
2656 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2657 m_pItem = m_pList->Back();
2662 const_iterator operator++(
int)
2664 const_iterator result = *
this;
2668 const_iterator operator--(
int)
2670 const_iterator result = *
this;
2675 bool operator==(
const const_iterator& rhs)
const 2677 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2678 return m_pItem == rhs.m_pItem;
2680 bool operator!=(
const const_iterator& rhs)
const 2682 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2683 return m_pItem != rhs.m_pItem;
2687 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2693 const VmaRawList<T>* m_pList;
2694 const VmaListItem<T>* m_pItem;
2696 friend class VmaList<T, AllocatorT>;
2699 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2701 bool empty()
const {
return m_RawList.IsEmpty(); }
2702 size_t size()
const {
return m_RawList.GetCount(); }
2704 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2705 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2707 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2708 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2710 void clear() { m_RawList.Clear(); }
2711 void push_back(
const T& value) { m_RawList.PushBack(value); }
2712 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2713 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2716 VmaRawList<T> m_RawList;
2719 #endif // #if VMA_USE_STL_LIST 2727 #if VMA_USE_STL_UNORDERED_MAP 2729 #define VmaPair std::pair 2731 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2732 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2734 #else // #if VMA_USE_STL_UNORDERED_MAP 2736 template<
typename T1,
typename T2>
2742 VmaPair() : first(), second() { }
2743 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2749 template<
typename KeyT,
typename ValueT>
2753 typedef VmaPair<KeyT, ValueT> PairType;
2754 typedef PairType* iterator;
2756 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2758 iterator begin() {
return m_Vector.begin(); }
2759 iterator end() {
return m_Vector.end(); }
2761 void insert(
const PairType& pair);
2762 iterator find(
const KeyT& key);
2763 void erase(iterator it);
2766 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2769 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2771 template<
typename FirstT,
typename SecondT>
2772 struct VmaPairFirstLess
2774 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2776 return lhs.first < rhs.first;
2778 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2780 return lhs.first < rhsFirst;
2784 template<
typename KeyT,
typename ValueT>
2785 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2787 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2789 m_Vector.data() + m_Vector.size(),
2791 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2792 VmaVectorInsert(m_Vector, indexToInsert, pair);
2795 template<
typename KeyT,
typename ValueT>
2796 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2798 PairType* it = VmaBinaryFindFirstNotLess(
2800 m_Vector.data() + m_Vector.size(),
2802 VmaPairFirstLess<KeyT, ValueT>());
2803 if((it != m_Vector.end()) && (it->first == key))
2809 return m_Vector.end();
2813 template<
typename KeyT,
typename ValueT>
2814 void VmaMap<KeyT, ValueT>::erase(iterator it)
2816 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2819 #endif // #if VMA_USE_STL_UNORDERED_MAP 2825 class VmaDeviceMemoryBlock;
2827 enum VMA_BLOCK_VECTOR_TYPE
2829 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2830 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2831 VMA_BLOCK_VECTOR_TYPE_COUNT
2837 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2838 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2841 struct VmaAllocation_T
2844 enum ALLOCATION_TYPE
2846 ALLOCATION_TYPE_NONE,
2847 ALLOCATION_TYPE_BLOCK,
2848 ALLOCATION_TYPE_DEDICATED,
2851 VmaAllocation_T(uint32_t currentFrameIndex) :
2854 m_pUserData(VMA_NULL),
2855 m_Type(ALLOCATION_TYPE_NONE),
2856 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2857 m_LastUseFrameIndex(currentFrameIndex)
2861 void InitBlockAllocation(
2863 VmaDeviceMemoryBlock* block,
2864 VkDeviceSize offset,
2865 VkDeviceSize alignment,
2867 VmaSuballocationType suballocationType,
2871 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2872 VMA_ASSERT(block != VMA_NULL);
2873 m_Type = ALLOCATION_TYPE_BLOCK;
2874 m_Alignment = alignment;
2876 m_pUserData = pUserData;
2877 m_SuballocationType = suballocationType;
2878 m_BlockAllocation.m_hPool = hPool;
2879 m_BlockAllocation.m_Block = block;
2880 m_BlockAllocation.m_Offset = offset;
2881 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2886 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2887 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2888 m_Type = ALLOCATION_TYPE_BLOCK;
2889 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2890 m_BlockAllocation.m_Block = VMA_NULL;
2891 m_BlockAllocation.m_Offset = 0;
2892 m_BlockAllocation.m_CanBecomeLost =
true;
2895 void ChangeBlockAllocation(
2896 VmaDeviceMemoryBlock* block,
2897 VkDeviceSize offset)
2899 VMA_ASSERT(block != VMA_NULL);
2900 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2901 m_BlockAllocation.m_Block = block;
2902 m_BlockAllocation.m_Offset = offset;
2905 void InitDedicatedAllocation(
2906 uint32_t memoryTypeIndex,
2907 VkDeviceMemory hMemory,
2908 VmaSuballocationType suballocationType,
2914 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2915 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2916 m_Type = ALLOCATION_TYPE_DEDICATED;
2919 m_pUserData = pUserData;
2920 m_SuballocationType = suballocationType;
2921 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2922 m_DedicatedAllocation.m_hMemory = hMemory;
2923 m_DedicatedAllocation.m_PersistentMap = persistentMap;
2924 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple inline accessors for VmaAllocation_T state.
2927 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2928 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2929 VkDeviceSize GetSize()
const {
return m_Size; }
2930 void* GetUserData()
const {
return m_pUserData; }
2931 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2932 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
// GetBlock() is only valid for block allocations (asserted).
2934 VmaDeviceMemoryBlock* GetBlock()
const 2936 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2937 return m_BlockAllocation.m_Block;
// The following accessors dispatch on m_Type; defined out-of-line later in the file.
2939 VkDeviceSize GetOffset()
const;
2940 VkDeviceMemory GetMemory()
const;
2941 uint32_t GetMemoryTypeIndex()
const;
2942 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2943 void* GetMappedData()
const;
2944 bool CanBecomeLost()
const;
2945 VmaPool GetPool()
const;
// Map/unmap helpers for persistently-mapped dedicated allocations.
2947 VkResult DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2948 void DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
// Atomic access to the last-use frame index, used by the lost-allocation
// machinery (compare_exchange_weak allows spurious failure; callers loop).
2950 uint32_t GetLastUseFrameIndex()
const 2952 return m_LastUseFrameIndex.load();
2954 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2956 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
2966 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Accumulate this dedicated allocation into a VmaStatInfo (dedicated-only; asserted).
2968 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
2970 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// VmaAllocation_T data members. Exactly one of the two trailing structs is
// active depending on m_Type (block vs. dedicated); presumably they live in a
// union in the full source — TODO confirm.
2982 VkDeviceSize m_Alignment;
2983 VkDeviceSize m_Size;
2985 ALLOCATION_TYPE m_Type;
2986 VmaSuballocationType m_SuballocationType;
// Atomic: read/CAS'd concurrently by the lost-allocation logic.
2987 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State for an allocation carved out of a larger VmaDeviceMemoryBlock.
2990 struct BlockAllocation
2993 VmaDeviceMemoryBlock* m_Block;
2994 VkDeviceSize m_Offset;
2995 bool m_CanBecomeLost;
// State for an allocation with its own VkDeviceMemory.
2999 struct DedicatedAllocation
3001 uint32_t m_MemoryTypeIndex;
3002 VkDeviceMemory m_hMemory;
3003 bool m_PersistentMap;
3004 void* m_pMappedData;
3010 BlockAllocation m_BlockAllocation;
3012 DedicatedAllocation m_DedicatedAllocation;
// One region (used or free) inside a device memory block. For free regions
// hAllocation is VK_NULL_HANDLE and type is VMA_SUBALLOCATION_TYPE_FREE
// (see Validate() later in this file).
3020 struct VmaSuballocation
3022 VkDeviceSize offset;
3024 VmaAllocation hAllocation;
3025 VmaSuballocationType type;
3028 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost weight charged per allocation that would have to be made "lost" to
// satisfy a request (1 MiB per item; see CalcCost below).
3031 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Candidate placement for a new allocation inside a block, plus the cost of
// the existing allocations that would need to be sacrificed.
3046 struct VmaAllocationRequest
3048 VkDeviceSize offset;
3049 VkDeviceSize sumFreeSize;
3050 VkDeviceSize sumItemSize;
3051 VmaSuballocationList::iterator item;
3052 size_t itemsToMakeLostCount;
// Lower cost = better candidate: bytes of live allocations displaced plus a
// fixed penalty per displaced item.
3054 VkDeviceSize CalcCost()
const 3056 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the suballocations within a single VkDeviceMemory block:
// a list of used/free ranges plus a size-sorted index of large free ranges.
// Owns no Vulkan objects itself.
3064 class VmaBlockMetadata
3067 VmaBlockMetadata(VmaAllocator hAllocator);
3068 ~VmaBlockMetadata();
3069 void Init(VkDeviceSize size);
// Consistency check of all internal invariants; used under VMA_HEAVY_ASSERT.
3072 bool Validate()
const;
3073 VkDeviceSize GetSize()
const {
return m_Size; }
3074 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3075 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3076 VkDeviceSize GetUnusedRangeSizeMax()
const;
// True when the block contains a single free suballocation spanning it all.
3078 bool IsEmpty()
const;
3080 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3083 #if VMA_STATS_STRING_ENABLED 3084 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Builds a request covering the whole (empty) block.
3088 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Tries to find a place for a new allocation; may propose making other
// allocations lost when canMakeOtherLost is set. Returns true on success.
3093 bool CreateAllocationRequest(
3094 uint32_t currentFrameIndex,
3095 uint32_t frameInUseCount,
3096 VkDeviceSize bufferImageGranularity,
3097 VkDeviceSize allocSize,
3098 VkDeviceSize allocAlignment,
3099 VmaSuballocationType allocType,
3100 bool canMakeOtherLost,
3101 VmaAllocationRequest* pAllocationRequest);
3103 bool MakeRequestedAllocationsLost(
3104 uint32_t currentFrameIndex,
3105 uint32_t frameInUseCount,
3106 VmaAllocationRequest* pAllocationRequest);
3108 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// (Alloc() signature — leading line lost in extraction.)
3112 const VmaAllocationRequest& request,
3113 VmaSuballocationType type,
3114 VkDeviceSize allocSize,
3115 VmaAllocation hAllocation);
3118 void Free(
const VmaAllocation allocation);
// Data: total size, free-range count/bytes, the ordered range list, and an
// ascending-by-size vector of iterators to registrable free ranges.
3121 VkDeviceSize m_Size;
3122 uint32_t m_FreeCount;
3123 VkDeviceSize m_SumFreeSize;
3124 VmaSuballocationList m_Suballocations;
3127 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3129 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits starting at suballocItem, honoring
// alignment and bufferImageGranularity; outputs offset and make-lost cost.
3133 bool CheckAllocation(
3134 uint32_t currentFrameIndex,
3135 uint32_t frameInUseCount,
3136 VkDeviceSize bufferImageGranularity,
3137 VkDeviceSize allocSize,
3138 VkDeviceSize allocAlignment,
3139 VmaSuballocationType allocType,
3140 VmaSuballocationList::const_iterator suballocItem,
3141 bool canMakeOtherLost,
3142 VkDeviceSize* pOffset,
3143 size_t* itemsToMakeLostCount,
3144 VkDeviceSize* pSumFreeSize,
3145 VkDeviceSize* pSumItemSize)
const;
3147 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3151 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3154 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3157 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Wraps one VkDeviceMemory handle plus its suballocation metadata and optional
// persistent mapping. Destruction asserts the memory was already released via
// Destroy() (destructor must not own a live handle).
3166 class VmaDeviceMemoryBlock
3169 uint32_t m_MemoryTypeIndex;
3170 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3171 VkDeviceMemory m_hMemory;
3172 bool m_PersistentMap;
3173 void* m_pMappedData;
3174 VmaBlockMetadata m_Metadata;
3176 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3178 ~VmaDeviceMemoryBlock()
3180 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// (Init() signature — leading line lost in extraction.)
3185 uint32_t newMemoryTypeIndex,
3186 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3187 VkDeviceMemory newMemory,
3188 VkDeviceSize newSize,
3192 void Destroy(VmaAllocator allocator);
3195 bool Validate()
const;
// Comparator ordering raw pointers; body lost in extraction (presumably
// lhs < rhs — TODO confirm).
3198 struct VmaPointerLess
3200 bool operator()(
const void* lhs,
const void* rhs)
const 3206 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock for one memory type / block
// vector type, with min/max block count limits. Used both for the allocator's
// default pools and for custom VmaPool objects (m_IsCustomPool).
3214 struct VmaBlockVector
3217 VmaAllocator hAllocator,
3218 uint32_t memoryTypeIndex,
3219 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3220 VkDeviceSize preferredBlockSize,
3221 size_t minBlockCount,
3222 size_t maxBlockCount,
3223 VkDeviceSize bufferImageGranularity,
3224 uint32_t frameInUseCount,
3228 VkResult CreateMinBlocks();
3230 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3231 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3232 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3233 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3234 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3238 bool IsEmpty()
const {
return m_Blocks.empty(); }
// (Allocate() signature — leading line lost in extraction.)
3241 VmaPool hCurrentPool,
3242 uint32_t currentFrameIndex,
3243 const VkMemoryRequirements& vkMemReq,
3245 VmaSuballocationType suballocType,
3246 VmaAllocation* pAllocation);
3249 VmaAllocation hAllocation);
3254 #if VMA_STATS_STRING_ENABLED 3255 void PrintDetailedMap(
class VmaJsonWriter& json);
3258 void UnmapPersistentlyMappedMemory();
3259 VkResult MapPersistentlyMappedMemory();
3261 void MakePoolAllocationsLost(
3262 uint32_t currentFrameIndex,
3263 size_t* pLostAllocationCount);
// Lazily creates (and caches) the defragmentator for this vector.
3265 VmaDefragmentator* EnsureDefragmentator(
3266 VmaAllocator hAllocator,
3267 uint32_t currentFrameIndex);
3269 VkResult Defragment(
3271 VkDeviceSize& maxBytesToMove,
3272 uint32_t& maxAllocationsToMove);
3274 void DestroyDefragmentator();
3277 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
3279 const VmaAllocator m_hAllocator;
3280 const uint32_t m_MemoryTypeIndex;
3281 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3282 const VkDeviceSize m_PreferredBlockSize;
3283 const size_t m_MinBlockCount;
3284 const size_t m_MaxBlockCount;
3285 const VkDeviceSize m_BufferImageGranularity;
3286 const uint32_t m_FrameInUseCount;
3287 const bool m_IsCustomPool;
3290 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3294 bool m_HasEmptyBlock;
3295 VmaDefragmentator* m_pDefragmentator;
3298 void Remove(VmaDeviceMemoryBlock* pBlock);
3302 void IncrementallySortBlocks();
3304 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): the lines below belong to VmaPool_T, whose declaration line
// was lost in extraction — a custom pool is a thin wrapper over one
// VmaBlockVector.
3310 VmaBlockVector m_BlockVector;
3314 VmaAllocator hAllocator,
3318 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact memory,
// within caller-supplied byte/count budgets. Collects per-block candidate
// lists, sorts blocks so the best move destinations come first, then performs
// rounds of moves (DefragmentRound).
3320 #if VMA_STATS_STRING_ENABLED 3325 class VmaDefragmentator
3327 const VmaAllocator m_hAllocator;
3328 VmaBlockVector*
const m_pBlockVector;
3329 uint32_t m_CurrentFrameIndex;
3330 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3331 VkDeviceSize m_BytesMoved;
3332 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged (optional) is
// set when the allocation was actually moved.
3334 struct AllocationInfo
3336 VmaAllocation m_hAllocation;
3337 VkBool32* m_pChanged;
3340 m_hAllocation(VK_NULL_HANDLE),
3341 m_pChanged(VMA_NULL)
// Orders candidate allocations largest-first.
3346 struct AllocationInfoSizeGreater
3348 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3350 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3355 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (presumably struct BlockInfo — its declaration line
// was lost in extraction).
3359 VmaDeviceMemoryBlock* m_pBlock;
3360 bool m_HasNonMovableAllocations;
3361 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3363 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3365 m_HasNonMovableAllocations(true),
3366 m_Allocations(pAllocationCallbacks),
3367 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations iff not every allocation in it was
// registered for defragmentation.
3371 void CalcHasNonMovableAllocations()
3373 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3374 const size_t defragmentAllocCount = m_Allocations.size();
3375 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (Typo "Descecnding" is in the original identifier; renaming would break callers.)
3378 void SortAllocationsBySizeDescecnding()
3380 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3383 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3384 void Unmap(VmaAllocator hAllocator);
3388 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparator: lets binary searches over BlockInfo* use a raw
// block pointer as the key.
3391 struct BlockPointerLess
3393 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3395 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3397 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3399 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Orders blocks by desirability as a move destination: fully-movable blocks
// first, then by free space (comparison lines partly lost in extraction).
3405 struct BlockInfoCompareMoveDestination
3407 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3409 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3413 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3417 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3425 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3426 BlockInfoVector m_Blocks;
3428 VkResult DefragmentRound(
3429 VkDeviceSize maxBytesToMove,
3430 uint32_t maxAllocationsToMove);
3432 static bool MoveMakesSense(
3433 size_t dstBlockIndex, VkDeviceSize dstOffset,
3434 size_t srcBlockIndex, VkDeviceSize srcOffset);
// (Constructor signature — leading line lost in extraction.)
3438 VmaAllocator hAllocator,
3439 VmaBlockVector* pBlockVector,
3440 uint32_t currentFrameIndex);
3442 ~VmaDefragmentator();
3444 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3445 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3447 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3449 VkResult Defragment(
3450 VkDeviceSize maxBytesToMove,
3451 uint32_t maxAllocationsToMove);
// Main allocator object behind the opaque VmaAllocator handle: holds device
// properties, per-memory-type block vectors, dedicated-allocation lists
// (each guarded by a per-type mutex), custom pools, and the current frame index.
3455 struct VmaAllocator_T
3458 bool m_UseKhrDedicatedAllocation;
3460 bool m_AllocationCallbacksSpecified;
3461 VkAllocationCallbacks m_AllocationCallbacks;
3465 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
// Optional per-heap size limits, protected by their own mutex.
3468 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3469 VMA_MUTEX m_HeapSizeLimitMutex;
3471 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3472 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors: one per (memory type, mapped/unmapped) pair.
3475 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3478 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3479 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3480 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns the user callbacks or null when none were specified.
3485 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3487 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3491 return m_VulkanFunctions;
// Effective granularity: max of the debug minimum and the device limit
// (the VMA_MAX wrapper line was lost in extraction — TODO confirm).
3494 VkDeviceSize GetBufferImageGranularity()
const 3497 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3498 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3501 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3502 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3504 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3506 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3507 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Query memory requirements, including dedicated-allocation preference when
// VK_KHR_dedicated_allocation is in use.
3510 void GetBufferMemoryRequirements(
3512 VkMemoryRequirements& memReq,
3513 bool& requiresDedicatedAllocation,
3514 bool& prefersDedicatedAllocation)
const;
3515 void GetImageMemoryRequirements(
3517 VkMemoryRequirements& memReq,
3518 bool& requiresDedicatedAllocation,
3519 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by the public vmaAllocateMemory* functions.
3522 VkResult AllocateMemory(
3523 const VkMemoryRequirements& vkMemReq,
3524 bool requiresDedicatedAllocation,
3525 bool prefersDedicatedAllocation,
3526 VkBuffer dedicatedBuffer,
3527 VkImage dedicatedImage,
3529 VmaSuballocationType suballocType,
3530 VmaAllocation* pAllocation);
3533 void FreeMemory(
const VmaAllocation allocation);
3535 void CalculateStats(
VmaStats* pStats);
3537 #if VMA_STATS_STRING_ENABLED 3538 void PrintDetailedMap(
class VmaJsonWriter& json);
3541 void UnmapPersistentlyMappedMemory();
3542 VkResult MapPersistentlyMappedMemory();
3544 VkResult Defragment(
3545 VmaAllocation* pAllocations,
3546 size_t allocationCount,
3547 VkBool32* pAllocationsChanged,
3551 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3554 void DestroyPool(VmaPool pool);
3555 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3557 void SetCurrentFrameIndex(uint32_t frameIndex);
3559 void MakePoolAllocationsLost(
3561 size_t* pLostAllocationCount);
3563 void CreateLostAllocation(VmaAllocation* pAllocation);
// Low-level vkAllocateMemory/vkFreeMemory wrappers (presumably also enforce
// heap size limits via m_HeapSizeLimit — TODO confirm).
3565 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3566 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3569 VkDeviceSize m_PreferredLargeHeapBlockSize;
3570 VkDeviceSize m_PreferredSmallHeapBlockSize;
3572 VkPhysicalDevice m_PhysicalDevice;
3573 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
3575 VMA_MUTEX m_PoolsMutex;
3577 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3583 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation for one specific memory type; falls back between block and
// dedicated paths.
3585 VkResult AllocateMemoryOfType(
3586 const VkMemoryRequirements& vkMemReq,
3587 bool dedicatedAllocation,
3588 VkBuffer dedicatedBuffer,
3589 VkImage dedicatedImage,
3591 uint32_t memTypeIndex,
3592 VmaSuballocationType suballocType,
3593 VmaAllocation* pAllocation);
3596 VkResult AllocateDedicatedMemory(
3598 VmaSuballocationType suballocType,
3599 uint32_t memTypeIndex,
3602 VkBuffer dedicatedBuffer,
3603 VkImage dedicatedImage,
3604 VmaAllocation* pAllocation);
3607 void FreeDedicatedMemory(VmaAllocation allocation);
// Convenience wrappers that route raw/typed (de)allocation through the
// allocator's VkAllocationCallbacks.
3613 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3615 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3618 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3620 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (raw memory only; construction lines, if any,
// were lost in extraction).
3623 template<
typename T>
3624 static T* VmaAllocate(VmaAllocator hAllocator)
3626 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3629 template<
typename T>
3630 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3632 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_delete / vma_delete_array: destroy then free (destructor-call lines were
// lost in extraction; only the free remains visible — TODO confirm).
3635 template<
typename T>
3636 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3641 VmaFree(hAllocator, ptr);
3645 template<
typename T>
3646 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3650 for(
size_t i = count; i--; )
3652 VmaFree(hAllocator, ptr);
// Minimal append-only string builder backed by VmaVector<char>; used to build
// the JSON stats string. Note: m_Data is NOT NUL-terminated by Add(); GetLength
// gives the valid length.
3659 #if VMA_STATS_STRING_ENABLED 3661 class VmaStringBuilder
3664 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3665 size_t GetLength()
const {
return m_Data.size(); }
3666 const char* GetData()
const {
return m_Data.data(); }
3668 void Add(
char ch) { m_Data.push_back(ch); }
3669 void Add(
const char* pStr);
3670 void AddNewLine() { Add(
'\n'); }
3671 void AddNumber(uint32_t num);
3672 void AddNumber(uint64_t num);
3673 void AddPointer(
const void* ptr);
3676 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Append a C string by resize + memcpy (no terminator copied).
3679 void VmaStringBuilder::Add(
const char* pStr)
3681 const size_t strLen = strlen(pStr);
3684 const size_t oldCount = m_Data.size();
3685 m_Data.resize(oldCount + strLen);
3686 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer formatting via a local buffer (buffer declaration lines were
// lost in extraction).
3690 void VmaStringBuilder::AddNumber(uint32_t num)
3693 VmaUint32ToStr(buf,
sizeof(buf), num);
3697 void VmaStringBuilder::AddNumber(uint64_t num)
3700 VmaUint64ToStr(buf,
sizeof(buf), num);
3704 void VmaStringBuilder::AddPointer(
const void* ptr)
3707 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over VmaStringBuilder: object/array nesting is tracked
// on an explicit stack; strings are opened/continued/closed so values can be
// built incrementally.
3711 #endif // #if VMA_STATS_STRING_ENABLED 3716 #if VMA_STATS_STRING_ENABLED 3721 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3724 void BeginObject(
bool singleLine =
false);
3727 void BeginArray(
bool singleLine =
false);
3730 void WriteString(
const char* pStr);
// BeginString/ContinueString/EndString allow composing one JSON string value
// from several pieces.
3731 void BeginString(
const char* pStr = VMA_NULL);
3732 void ContinueString(
const char* pStr);
3733 void ContinueString(uint32_t n);
3734 void ContinueString(uint64_t n);
3735 void EndString(
const char* pStr = VMA_NULL);
3737 void WriteNumber(uint32_t n);
3738 void WriteNumber(uint64_t n);
3739 void WriteBool(
bool b);
3743 static const char*
const INDENT;
// Stack item: collection kind, number of values written so far (objects
// alternate key/value by parity), and single-line formatting flag.
3745 enum COLLECTION_TYPE
3747 COLLECTION_TYPE_OBJECT,
3748 COLLECTION_TYPE_ARRAY,
3752 COLLECTION_TYPE type;
3753 uint32_t valueCount;
3754 bool singleLineMode;
3757 VmaStringBuilder& m_SB;
3758 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3759 bool m_InsideString;
3761 void BeginValue(
bool isString);
3762 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
3765 const char*
const VmaJsonWriter::INDENT =
"  ";
// VmaJsonWriter method definitions. Asserts enforce the protocol: no structural
// writes while inside a string, balanced Begin/End pairs, keys before values.
3767 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3769 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3770 m_InsideString(false)
// Destructor asserts the document was fully closed.
3774 VmaJsonWriter::~VmaJsonWriter()
3776 VMA_ASSERT(!m_InsideString);
3777 VMA_ASSERT(m_Stack.empty());
// Push a new object scope (emit lines for '{' etc. were lost in extraction).
3780 void VmaJsonWriter::BeginObject(
bool singleLine)
3782 VMA_ASSERT(!m_InsideString);
3788 item.type = COLLECTION_TYPE_OBJECT;
3789 item.valueCount = 0;
3790 item.singleLineMode = singleLine;
3791 m_Stack.push_back(item);
3794 void VmaJsonWriter::EndObject()
3796 VMA_ASSERT(!m_InsideString);
3801 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3805 void VmaJsonWriter::BeginArray(
bool singleLine)
3807 VMA_ASSERT(!m_InsideString);
3813 item.type = COLLECTION_TYPE_ARRAY;
3814 item.valueCount = 0;
3815 item.singleLineMode = singleLine;
3816 m_Stack.push_back(item);
3819 void VmaJsonWriter::EndArray()
3821 VMA_ASSERT(!m_InsideString);
3826 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// WriteString = BeginString + EndString convenience (body lost in extraction).
3830 void VmaJsonWriter::WriteString(
const char* pStr)
3836 void VmaJsonWriter::BeginString(
const char* pStr)
3838 VMA_ASSERT(!m_InsideString);
3842 m_InsideString =
true;
3843 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3845 ContinueString(pStr);
// Appends to the open string, escaping per character; unsupported characters
// assert (per-character switch lines were lost in extraction).
3849 void VmaJsonWriter::ContinueString(
const char* pStr)
3851 VMA_ASSERT(m_InsideString);
3853 const size_t strLen = strlen(pStr);
3854 for(
size_t i = 0; i < strLen; ++i)
3881 VMA_ASSERT(0 &&
"Character not currently supported.");
3887 void VmaJsonWriter::ContinueString(uint32_t n)
3889 VMA_ASSERT(m_InsideString);
3893 void VmaJsonWriter::ContinueString(uint64_t n)
3895 VMA_ASSERT(m_InsideString);
3899 void VmaJsonWriter::EndString(
const char* pStr)
3901 VMA_ASSERT(m_InsideString);
3902 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3904 ContinueString(pStr);
3907 m_InsideString =
false;
3910 void VmaJsonWriter::WriteNumber(uint32_t n)
3912 VMA_ASSERT(!m_InsideString);
3917 void VmaJsonWriter::WriteNumber(uint64_t n)
3919 VMA_ASSERT(!m_InsideString);
3924 void VmaJsonWriter::WriteBool(
bool b)
3926 VMA_ASSERT(!m_InsideString);
3928 m_SB.Add(b ?
"true" :
"false");
3931 void VmaJsonWriter::WriteNull()
3933 VMA_ASSERT(!m_InsideString);
// Emits separators/indent before a value; inside an object, even-indexed
// values must be string keys (asserted).
3938 void VmaJsonWriter::BeginValue(
bool isString)
3940 if(!m_Stack.empty())
3942 StackItem& currItem = m_Stack.back();
3943 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3944 currItem.valueCount % 2 == 0)
3946 VMA_ASSERT(isString);
3949 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3950 currItem.valueCount % 2 != 0)
3954 else if(currItem.valueCount > 0)
3963 ++currItem.valueCount;
// Writes newline + one INDENT per stack level (minus one when closing),
// skipped in single-line mode.
3967 void VmaJsonWriter::WriteIndent(
bool oneLess)
3969 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3973 size_t count = m_Stack.size();
3974 if(count > 0 && oneLess)
3978 for(
size_t i = 0; i < count; ++i)
// Out-of-line VmaAllocation_T accessors: each switches on m_Type to read from
// the block- or dedicated-allocation state (switch headers were lost in
// extraction; the case labels remain).
3985 #endif // #if VMA_STATS_STRING_ENABLED 3989 VkDeviceSize VmaAllocation_T::GetOffset()
const 3993 case ALLOCATION_TYPE_BLOCK:
3994 return m_BlockAllocation.m_Offset;
3995 case ALLOCATION_TYPE_DEDICATED:
4003 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4007 case ALLOCATION_TYPE_BLOCK:
4008 return m_BlockAllocation.m_Block->m_hMemory;
4009 case ALLOCATION_TYPE_DEDICATED:
4010 return m_DedicatedAllocation.m_hMemory;
4013 return VK_NULL_HANDLE;
4017 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4021 case ALLOCATION_TYPE_BLOCK:
4022 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4023 case ALLOCATION_TYPE_DEDICATED:
4024 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Dedicated allocations infer their vector type from the persistent-map flag.
4031 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 4035 case ALLOCATION_TYPE_BLOCK:
4036 return m_BlockAllocation.m_Block->m_BlockVectorType;
4037 case ALLOCATION_TYPE_DEDICATED:
4038 return (m_DedicatedAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
4041 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// Block allocations return their offset into the block's mapping (if mapped).
4045 void* VmaAllocation_T::GetMappedData()
const 4049 case ALLOCATION_TYPE_BLOCK:
4050 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
4052 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
4059 case ALLOCATION_TYPE_DEDICATED:
4060 return m_DedicatedAllocation.m_pMappedData;
4067 bool VmaAllocation_T::CanBecomeLost()
const 4071 case ALLOCATION_TYPE_BLOCK:
4072 return m_BlockAllocation.m_CanBecomeLost;
4073 case ALLOCATION_TYPE_DEDICATED:
// GetPool is block-only (asserted).
4081 VmaPool VmaAllocation_T::GetPool()
const 4083 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4084 return m_BlockAllocation.m_hPool;
// (Re)map a persistently-mapped dedicated allocation via the loaded
// vkMapMemory pointer; no-op for non-persistent allocations.
4087 VkResult VmaAllocation_T::DedicatedAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4089 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4090 if(m_DedicatedAllocation.m_PersistentMap)
4092 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4093 hAllocator->m_hDevice,
4094 m_DedicatedAllocation.m_hMemory,
4098 &m_DedicatedAllocation.m_pMappedData);
4102 void VmaAllocation_T::DedicatedAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4104 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4105 if(m_DedicatedAllocation.m_pMappedData)
4107 VMA_ASSERT(m_DedicatedAllocation.m_PersistentMap);
4108 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_DedicatedAllocation.m_hMemory);
4109 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
// MakeLost: CAS the last-use frame index to VMA_FRAME_INDEX_LOST, but only if
// the allocation is already lost-capable and old enough (outside the
// frameInUseCount window). Retry loop lines were lost in extraction.
4114 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4116 VMA_ASSERT(CanBecomeLost());
4122 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4125 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4130 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4136 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// JSON serialization of one VmaStatInfo: counts, byte totals, and min/avg/max
// for allocation and unused-range sizes (WriteNumber argument lines were lost
// in extraction; only the key strings remain).
4146 #if VMA_STATS_STRING_ENABLED 4149 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4158 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4162 json.WriteString(
"Blocks");
4165 json.WriteString(
"Allocations");
4168 json.WriteString(
"UnusedRanges");
4171 json.WriteString(
"UsedBytes");
4174 json.WriteString(
"UnusedBytes");
4179 json.WriteString(
"AllocationSize");
4180 json.BeginObject(
true);
4181 json.WriteString(
"Min");
4183 json.WriteString(
"Avg");
4185 json.WriteString(
"Max");
4192 json.WriteString(
"UnusedRangeSize");
4193 json.BeginObject(
true);
4194 json.WriteString(
"Min");
4196 json.WriteString(
"Avg");
4198 json.WriteString(
"Max");
// Comparator for the size-sorted free-suballocation index: supports both
// iterator/iterator and iterator/size comparisons (for binary search by size).
4206 #endif // #if VMA_STATS_STRING_ENABLED 4208 struct VmaSuballocationItemSizeLess
4211 const VmaSuballocationList::iterator lhs,
4212 const VmaSuballocationList::iterator rhs)
const 4214 return lhs->size < rhs->size;
4217 const VmaSuballocationList::iterator lhs,
4218 VkDeviceSize rhsSize)
const 4220 return lhs->size < rhsSize;
// Construction: containers use the allocator's callbacks; scalar member
// initializers were lost in extraction.
4227 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4231 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4232 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4236 VmaBlockMetadata::~VmaBlockMetadata()
// Init: one free suballocation spanning the whole block, registered in the
// by-size index (the iterator-decrement line to reach the pushed element was
// lost in extraction).
4240 void VmaBlockMetadata::Init(VkDeviceSize size)
4244 m_SumFreeSize = size;
4246 VmaSuballocation suballoc = {};
4247 suballoc.offset = 0;
4248 suballoc.size = size;
4249 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4250 suballoc.hAllocation = VK_NULL_HANDLE;
4252 m_Suballocations.push_back(suballoc);
4253 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4255 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: walks the suballocation list recomputing offset, free count, free
// bytes and the number of registrable free ranges, and checks the invariants:
// contiguous offsets, no two adjacent free ranges, hAllocation null iff free,
// by-size index sorted ascending and containing only free items, totals match.
4258 bool VmaBlockMetadata::Validate()
const 4260 if(m_Suballocations.empty())
4266 VkDeviceSize calculatedOffset = 0;
4268 uint32_t calculatedFreeCount = 0;
4270 VkDeviceSize calculatedSumFreeSize = 0;
4273 size_t freeSuballocationsToRegister = 0;
4275 bool prevFree =
false;
4277 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4278 suballocItem != m_Suballocations.cend();
4281 const VmaSuballocation& subAlloc = *suballocItem;
// Every suballocation must start exactly where the previous one ended.
4284 if(subAlloc.offset != calculatedOffset)
// Two free ranges must never be adjacent (they should have been merged).
4289 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4291 if(prevFree && currFree)
4295 prevFree = currFree;
4297 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4304 calculatedSumFreeSize += subAlloc.size;
4305 ++calculatedFreeCount;
// Only free ranges at or above the registration threshold go in the index.
4306 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4308 ++freeSuballocationsToRegister;
4312 calculatedOffset += subAlloc.size;
4317 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must be sorted ascending and reference only free items.
4322 VkDeviceSize lastSize = 0;
4323 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4325 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4328 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4333 if(suballocItem->size < lastSize)
4338 lastSize = suballocItem->size;
4343 ValidateFreeSuballocationList() &&
4344 (calculatedOffset == m_Size) &&
4345 (calculatedSumFreeSize == m_SumFreeSize) &&
4346 (calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the ascending by-size index.
4349 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4351 if(!m_FreeSuballocationsBySize.empty())
4353 return m_FreeSuballocationsBySize.back()->size;
// Empty block = exactly one suballocation, and it is free.
4361 bool VmaBlockMetadata::IsEmpty()
const 4363 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fill a VmaStatInfo from the range list (accumulation lines were lost in
// extraction; only the iteration skeleton remains).
4366 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4370 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4382 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4383 suballocItem != m_Suballocations.cend();
4386 const VmaSuballocation& suballoc = *suballocItem;
4387 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulate this block's totals into an existing VmaPoolStats.
4400 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4402 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4404 inoutStats.
size += m_Size;
// Emit this block's layout as a JSON object with a per-suballocation array.
4411 #if VMA_STATS_STRING_ENABLED 4413 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4417 json.WriteString(
"TotalBytes");
4418 json.WriteNumber(m_Size);
4420 json.WriteString(
"UnusedBytes");
4421 json.WriteNumber(m_SumFreeSize);
4423 json.WriteString(
"Allocations");
4424 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4426 json.WriteString(
"UnusedRanges");
4427 json.WriteNumber(m_FreeCount);
4429 json.WriteString(
"Suballocations");
4432 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4433 suballocItem != m_Suballocations.cend();
4434 ++suballocItem, ++i)
4436 json.BeginObject(
true);
4438 json.WriteString(
"Type");
4439 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4441 json.WriteString(
"Size");
4442 json.WriteNumber(suballocItem->size);
4444 json.WriteString(
"Offset");
4445 json.WriteNumber(suballocItem->offset);
// For a known-empty block: trivially request offset 0 with no items to evict.
4454 #endif // #if VMA_STATS_STRING_ENABLED 4466 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4468 VMA_ASSERT(IsEmpty());
4469 pAllocationRequest->offset = 0;
4470 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4471 pAllocationRequest->sumItemSize = 0;
4472 pAllocationRequest->item = m_Suballocations.begin();
4473 pAllocationRequest->itemsToMakeLostCount = 0;
// Find a placement for a new allocation. Strategy: first search registered
// free ranges (best-fit via binary search, or worst-fit — the branch structure
// was partly lost in extraction); if that fails and canMakeOtherLost is set,
// brute-force every suballocation, accepting the candidate with the lowest
// make-lost cost.
4476 bool VmaBlockMetadata::CreateAllocationRequest(
4477 uint32_t currentFrameIndex,
4478 uint32_t frameInUseCount,
4479 VkDeviceSize bufferImageGranularity,
4480 VkDeviceSize allocSize,
4481 VkDeviceSize allocAlignment,
4482 VmaSuballocationType allocType,
4483 bool canMakeOtherLost,
4484 VmaAllocationRequest* pAllocationRequest)
4486 VMA_ASSERT(allocSize > 0);
4487 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4488 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4490 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must at least cover the request.
4492 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4498 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4499 if(freeSuballocCount > 0)
// Best-fit: binary-search the ascending by-size index for the first free
// range not smaller than the request, then scan upward until one fits.
4504 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4505 m_FreeSuballocationsBySize.data(),
4506 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4508 VmaSuballocationItemSizeLess());
4509 size_t index = it - m_FreeSuballocationsBySize.data();
4510 for(; index < freeSuballocCount; ++index)
4515 bufferImageGranularity,
4519 m_FreeSuballocationsBySize[index],
4521 &pAllocationRequest->offset,
4522 &pAllocationRequest->itemsToMakeLostCount,
4523 &pAllocationRequest->sumFreeSize,
4524 &pAllocationRequest->sumItemSize))
4526 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate path (presumably worst-fit, largest-first — TODO confirm): scan
// the index from the largest range downward.
4534 for(
size_t index = freeSuballocCount; index--; )
4539 bufferImageGranularity,
4543 m_FreeSuballocationsBySize[index],
4545 &pAllocationRequest->offset,
4546 &pAllocationRequest->itemsToMakeLostCount,
4547 &pAllocationRequest->sumFreeSize,
4548 &pAllocationRequest->sumItemSize))
4550 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: consider every position, tracking the cheapest candidate by
// CalcCost() (displaced bytes + per-item penalty).
4557 if(canMakeOtherLost)
4561 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4562 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4564 VmaAllocationRequest tmpAllocRequest = {};
4565 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4566 suballocIt != m_Suballocations.end();
4569 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4570 suballocIt->hAllocation->CanBecomeLost())
4575 bufferImageGranularity,
4581 &tmpAllocRequest.offset,
4582 &tmpAllocRequest.itemsToMakeLostCount,
4583 &tmpAllocRequest.sumFreeSize,
4584 &tmpAllocRequest.sumItemSize))
4586 tmpAllocRequest.item = suballocIt;
4588 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4590 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate was recorded (sumItemSize left at the
// VK_WHOLE_SIZE sentinel means none was found).
4596 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
4605 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4606 uint32_t currentFrameIndex,
4607 uint32_t frameInUseCount,
4608 VmaAllocationRequest* pAllocationRequest)
4610 while(pAllocationRequest->itemsToMakeLostCount > 0)
4612 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4614 ++pAllocationRequest->item;
4616 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4617 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4618 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4619 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4621 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4622 --pAllocationRequest->itemsToMakeLostCount;
4630 VMA_HEAVY_ASSERT(Validate());
4631 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4632 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
4637 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4639 uint32_t lostAllocationCount = 0;
4640 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4641 it != m_Suballocations.end();
4644 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4645 it->hAllocation->CanBecomeLost() &&
4646 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4648 it = FreeSuballocation(it);
4649 ++lostAllocationCount;
4652 return lostAllocationCount;
4655 void VmaBlockMetadata::Alloc(
4656 const VmaAllocationRequest& request,
4657 VmaSuballocationType type,
4658 VkDeviceSize allocSize,
4659 VmaAllocation hAllocation)
4661 VMA_ASSERT(request.item != m_Suballocations.end());
4662 VmaSuballocation& suballoc = *request.item;
4664 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4666 VMA_ASSERT(request.offset >= suballoc.offset);
4667 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4668 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4669 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
4673 UnregisterFreeSuballocation(request.item);
4675 suballoc.offset = request.offset;
4676 suballoc.size = allocSize;
4677 suballoc.type = type;
4678 suballoc.hAllocation = hAllocation;
4683 VmaSuballocation paddingSuballoc = {};
4684 paddingSuballoc.offset = request.offset + allocSize;
4685 paddingSuballoc.size = paddingEnd;
4686 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4687 VmaSuballocationList::iterator next = request.item;
4689 const VmaSuballocationList::iterator paddingEndItem =
4690 m_Suballocations.insert(next, paddingSuballoc);
4691 RegisterFreeSuballocation(paddingEndItem);
4697 VmaSuballocation paddingSuballoc = {};
4698 paddingSuballoc.offset = request.offset - paddingBegin;
4699 paddingSuballoc.size = paddingBegin;
4700 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4701 const VmaSuballocationList::iterator paddingBeginItem =
4702 m_Suballocations.insert(request.item, paddingSuballoc);
4703 RegisterFreeSuballocation(paddingBeginItem);
4707 m_FreeCount = m_FreeCount - 1;
4708 if(paddingBegin > 0)
4716 m_SumFreeSize -= allocSize;
4719 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4721 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4722 suballocItem != m_Suballocations.end();
4725 VmaSuballocation& suballoc = *suballocItem;
4726 if(suballoc.hAllocation == allocation)
4728 FreeSuballocation(suballocItem);
4729 VMA_HEAVY_ASSERT(Validate());
4733 VMA_ASSERT(0 &&
"Not found!");
4736 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4738 VkDeviceSize lastSize = 0;
4739 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4741 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4743 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
4748 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4753 if(it->size < lastSize)
4759 lastSize = it->size;
4764 bool VmaBlockMetadata::CheckAllocation(
4765 uint32_t currentFrameIndex,
4766 uint32_t frameInUseCount,
4767 VkDeviceSize bufferImageGranularity,
4768 VkDeviceSize allocSize,
4769 VkDeviceSize allocAlignment,
4770 VmaSuballocationType allocType,
4771 VmaSuballocationList::const_iterator suballocItem,
4772 bool canMakeOtherLost,
4773 VkDeviceSize* pOffset,
4774 size_t* itemsToMakeLostCount,
4775 VkDeviceSize* pSumFreeSize,
4776 VkDeviceSize* pSumItemSize)
const 4778 VMA_ASSERT(allocSize > 0);
4779 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4780 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4781 VMA_ASSERT(pOffset != VMA_NULL);
4783 *itemsToMakeLostCount = 0;
4787 if(canMakeOtherLost)
4789 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4791 *pSumFreeSize = suballocItem->size;
4795 if(suballocItem->hAllocation->CanBecomeLost() &&
4796 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4798 ++*itemsToMakeLostCount;
4799 *pSumItemSize = suballocItem->size;
4808 if(m_Size - suballocItem->offset < allocSize)
4814 *pOffset = suballocItem->offset;
4817 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4819 *pOffset += VMA_DEBUG_MARGIN;
4823 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4824 *pOffset = VmaAlignUp(*pOffset, alignment);
4828 if(bufferImageGranularity > 1)
4830 bool bufferImageGranularityConflict =
false;
4831 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4832 while(prevSuballocItem != m_Suballocations.cbegin())
4835 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4836 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4838 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4840 bufferImageGranularityConflict =
true;
4848 if(bufferImageGranularityConflict)
4850 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4856 if(*pOffset >= suballocItem->offset + suballocItem->size)
4862 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4865 VmaSuballocationList::const_iterator next = suballocItem;
4867 const VkDeviceSize requiredEndMargin =
4868 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4870 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4872 if(suballocItem->offset + totalSize > m_Size)
4879 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4880 if(totalSize > suballocItem->size)
4882 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4883 while(remainingSize > 0)
4886 if(lastSuballocItem == m_Suballocations.cend())
4890 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4892 *pSumFreeSize += lastSuballocItem->size;
4896 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4897 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4898 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4900 ++*itemsToMakeLostCount;
4901 *pSumItemSize += lastSuballocItem->size;
4908 remainingSize = (lastSuballocItem->size < remainingSize) ?
4909 remainingSize - lastSuballocItem->size : 0;
4915 if(bufferImageGranularity > 1)
4917 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4919 while(nextSuballocItem != m_Suballocations.cend())
4921 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4922 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4924 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4926 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4927 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4928 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4930 ++*itemsToMakeLostCount;
4949 const VmaSuballocation& suballoc = *suballocItem;
4950 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4952 *pSumFreeSize = suballoc.size;
4955 if(suballoc.size < allocSize)
4961 *pOffset = suballoc.offset;
4964 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4966 *pOffset += VMA_DEBUG_MARGIN;
4970 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4971 *pOffset = VmaAlignUp(*pOffset, alignment);
4975 if(bufferImageGranularity > 1)
4977 bool bufferImageGranularityConflict =
false;
4978 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4979 while(prevSuballocItem != m_Suballocations.cbegin())
4982 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4983 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4985 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4987 bufferImageGranularityConflict =
true;
4995 if(bufferImageGranularityConflict)
4997 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5002 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5005 VmaSuballocationList::const_iterator next = suballocItem;
5007 const VkDeviceSize requiredEndMargin =
5008 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5011 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
5018 if(bufferImageGranularity > 1)
5020 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5022 while(nextSuballocItem != m_Suballocations.cend())
5024 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5025 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5027 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5046 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5048 VMA_ASSERT(item != m_Suballocations.end());
5049 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5051 VmaSuballocationList::iterator nextItem = item;
5053 VMA_ASSERT(nextItem != m_Suballocations.end());
5054 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5056 item->size += nextItem->size;
5058 m_Suballocations.erase(nextItem);
5061 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5064 VmaSuballocation& suballoc = *suballocItem;
5065 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5066 suballoc.hAllocation = VK_NULL_HANDLE;
5070 m_SumFreeSize += suballoc.size;
5073 bool mergeWithNext =
false;
5074 bool mergeWithPrev =
false;
5076 VmaSuballocationList::iterator nextItem = suballocItem;
5078 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5080 mergeWithNext =
true;
5083 VmaSuballocationList::iterator prevItem = suballocItem;
5084 if(suballocItem != m_Suballocations.begin())
5087 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5089 mergeWithPrev =
true;
5095 UnregisterFreeSuballocation(nextItem);
5096 MergeFreeWithNext(suballocItem);
5101 UnregisterFreeSuballocation(prevItem);
5102 MergeFreeWithNext(prevItem);
5103 RegisterFreeSuballocation(prevItem);
5108 RegisterFreeSuballocation(suballocItem);
5109 return suballocItem;
5113 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5115 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5116 VMA_ASSERT(item->size > 0);
5120 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5122 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5124 if(m_FreeSuballocationsBySize.empty())
5126 m_FreeSuballocationsBySize.push_back(item);
5130 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5138 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5140 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5141 VMA_ASSERT(item->size > 0);
5145 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5147 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5149 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5150 m_FreeSuballocationsBySize.data(),
5151 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5153 VmaSuballocationItemSizeLess());
5154 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5155 index < m_FreeSuballocationsBySize.size();
5158 if(m_FreeSuballocationsBySize[index] == item)
5160 VmaVectorRemove(m_FreeSuballocationsBySize, index);
5163 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5165 VMA_ASSERT(0 &&
"Not found.");
5174 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5175 m_MemoryTypeIndex(UINT32_MAX),
5176 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
5177 m_hMemory(VK_NULL_HANDLE),
5178 m_PersistentMap(false),
5179 m_pMappedData(VMA_NULL),
5180 m_Metadata(hAllocator)
5184 void VmaDeviceMemoryBlock::Init(
5185 uint32_t newMemoryTypeIndex,
5186 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
5187 VkDeviceMemory newMemory,
5188 VkDeviceSize newSize,
5192 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5194 m_MemoryTypeIndex = newMemoryTypeIndex;
5195 m_BlockVectorType = newBlockVectorType;
5196 m_hMemory = newMemory;
5197 m_PersistentMap = persistentMap;
5198 m_pMappedData = pMappedData;
5200 m_Metadata.Init(newSize);
5203 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5207 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5209 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5210 if(m_pMappedData != VMA_NULL)
5212 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
5213 m_pMappedData = VMA_NULL;
5216 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5217 m_hMemory = VK_NULL_HANDLE;
5220 bool VmaDeviceMemoryBlock::Validate()
const 5222 if((m_hMemory == VK_NULL_HANDLE) ||
5223 (m_Metadata.GetSize() == 0))
5228 return m_Metadata.Validate();
5233 memset(&outInfo, 0,
sizeof(outInfo));
5252 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
5260 VmaPool_T::VmaPool_T(
5261 VmaAllocator hAllocator,
5265 createInfo.memoryTypeIndex,
5267 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5268 createInfo.blockSize,
5269 createInfo.minBlockCount,
5270 createInfo.maxBlockCount,
5272 createInfo.frameInUseCount,
5277 VmaPool_T::~VmaPool_T()
5281 #if VMA_STATS_STRING_ENABLED 5283 #endif // #if VMA_STATS_STRING_ENABLED 5285 VmaBlockVector::VmaBlockVector(
5286 VmaAllocator hAllocator,
5287 uint32_t memoryTypeIndex,
5288 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5289 VkDeviceSize preferredBlockSize,
5290 size_t minBlockCount,
5291 size_t maxBlockCount,
5292 VkDeviceSize bufferImageGranularity,
5293 uint32_t frameInUseCount,
5294 bool isCustomPool) :
5295 m_hAllocator(hAllocator),
5296 m_MemoryTypeIndex(memoryTypeIndex),
5297 m_BlockVectorType(blockVectorType),
5298 m_PreferredBlockSize(preferredBlockSize),
5299 m_MinBlockCount(minBlockCount),
5300 m_MaxBlockCount(maxBlockCount),
5301 m_BufferImageGranularity(bufferImageGranularity),
5302 m_FrameInUseCount(frameInUseCount),
5303 m_IsCustomPool(isCustomPool),
5304 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5305 m_HasEmptyBlock(false),
5306 m_pDefragmentator(VMA_NULL)
5310 VmaBlockVector::~VmaBlockVector()
5312 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5314 for(
size_t i = m_Blocks.size(); i--; )
5316 m_Blocks[i]->Destroy(m_hAllocator);
5317 vma_delete(m_hAllocator, m_Blocks[i]);
5321 VkResult VmaBlockVector::CreateMinBlocks()
5323 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5325 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5326 if(res != VK_SUCCESS)
5334 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5342 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5344 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5346 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5348 VMA_HEAVY_ASSERT(pBlock->Validate());
5349 pBlock->m_Metadata.AddPoolStats(*pStats);
5353 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5355 VkResult VmaBlockVector::Allocate(
5356 VmaPool hCurrentPool,
5357 uint32_t currentFrameIndex,
5358 const VkMemoryRequirements& vkMemReq,
5360 VmaSuballocationType suballocType,
5361 VmaAllocation* pAllocation)
5364 if(createInfo.
pool != VK_NULL_HANDLE &&
5367 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5368 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5371 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5375 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5377 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5378 VMA_ASSERT(pCurrBlock);
5379 VmaAllocationRequest currRequest = {};
5380 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5383 m_BufferImageGranularity,
5391 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
5394 if(pCurrBlock->m_Metadata.IsEmpty())
5396 m_HasEmptyBlock =
false;
5399 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5400 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5401 (*pAllocation)->InitBlockAllocation(
5410 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5411 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5416 const bool canCreateNewBlock =
5418 (m_Blocks.size() < m_MaxBlockCount);
5421 if(canCreateNewBlock)
5424 VkDeviceSize blockSize = m_PreferredBlockSize;
5425 size_t newBlockIndex = 0;
5426 VkResult res = CreateBlock(blockSize, &newBlockIndex);
5429 if(res < 0 && m_IsCustomPool ==
false)
5433 if(blockSize >= vkMemReq.size)
5435 res = CreateBlock(blockSize, &newBlockIndex);
5440 if(blockSize >= vkMemReq.size)
5442 res = CreateBlock(blockSize, &newBlockIndex);
5447 if(res == VK_SUCCESS)
5449 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5450 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
5453 VmaAllocationRequest allocRequest;
5454 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5455 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5456 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5457 (*pAllocation)->InitBlockAllocation(
5460 allocRequest.offset,
5466 VMA_HEAVY_ASSERT(pBlock->Validate());
5467 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
5476 if(canMakeOtherLost)
5478 uint32_t tryIndex = 0;
5479 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5481 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5482 VmaAllocationRequest bestRequest = {};
5483 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5487 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5489 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5490 VMA_ASSERT(pCurrBlock);
5491 VmaAllocationRequest currRequest = {};
5492 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5495 m_BufferImageGranularity,
5502 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5503 if(pBestRequestBlock == VMA_NULL ||
5504 currRequestCost < bestRequestCost)
5506 pBestRequestBlock = pCurrBlock;
5507 bestRequest = currRequest;
5508 bestRequestCost = currRequestCost;
5510 if(bestRequestCost == 0)
5518 if(pBestRequestBlock != VMA_NULL)
5520 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5526 if(pBestRequestBlock->m_Metadata.IsEmpty())
5528 m_HasEmptyBlock =
false;
5531 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5532 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5533 (*pAllocation)->InitBlockAllocation(
5542 VMA_HEAVY_ASSERT(pBlock->Validate());
5543 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5557 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5559 return VK_ERROR_TOO_MANY_OBJECTS;
5563 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5566 void VmaBlockVector::Free(
5567 VmaAllocation hAllocation)
5569 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
5573 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5575 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5577 pBlock->m_Metadata.Free(hAllocation);
5578 VMA_HEAVY_ASSERT(pBlock->Validate());
5580 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5583 if(pBlock->m_Metadata.IsEmpty())
5586 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5588 pBlockToDelete = pBlock;
5594 m_HasEmptyBlock =
true;
5599 else if(m_HasEmptyBlock)
5601 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
5602 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
5604 pBlockToDelete = pLastBlock;
5605 m_Blocks.pop_back();
5606 m_HasEmptyBlock =
false;
5610 IncrementallySortBlocks();
5615 if(pBlockToDelete != VMA_NULL)
5617 VMA_DEBUG_LOG(
" Deleted empty allocation");
5618 pBlockToDelete->Destroy(m_hAllocator);
5619 vma_delete(m_hAllocator, pBlockToDelete);
5623 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5625 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5627 if(m_Blocks[blockIndex] == pBlock)
5629 VmaVectorRemove(m_Blocks, blockIndex);
5636 void VmaBlockVector::IncrementallySortBlocks()
5639 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5641 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5643 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
5649 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5651 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5652 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5653 allocInfo.allocationSize = blockSize;
5654 VkDeviceMemory mem = VK_NULL_HANDLE;
5655 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5664 void* pMappedData = VMA_NULL;
5665 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5666 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5668 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5669 m_hAllocator->m_hDevice,
5677 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5678 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5684 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5687 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5689 allocInfo.allocationSize,
5693 m_Blocks.push_back(pBlock);
5694 if(pNewBlockIndex != VMA_NULL)
5696 *pNewBlockIndex = m_Blocks.size() - 1;
5702 #if VMA_STATS_STRING_ENABLED 5704 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5706 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5712 json.WriteString(
"MemoryTypeIndex");
5713 json.WriteNumber(m_MemoryTypeIndex);
5715 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5717 json.WriteString(
"Mapped");
5718 json.WriteBool(
true);
5721 json.WriteString(
"BlockSize");
5722 json.WriteNumber(m_PreferredBlockSize);
5724 json.WriteString(
"BlockCount");
5725 json.BeginObject(
true);
5726 if(m_MinBlockCount > 0)
5728 json.WriteString(
"Min");
5729 json.WriteNumber(m_MinBlockCount);
5731 if(m_MaxBlockCount < SIZE_MAX)
5733 json.WriteString(
"Max");
5734 json.WriteNumber(m_MaxBlockCount);
5736 json.WriteString(
"Cur");
5737 json.WriteNumber(m_Blocks.size());
5740 if(m_FrameInUseCount > 0)
5742 json.WriteString(
"FrameInUseCount");
5743 json.WriteNumber(m_FrameInUseCount);
5748 json.WriteString(
"PreferredBlockSize");
5749 json.WriteNumber(m_PreferredBlockSize);
5752 json.WriteString(
"Blocks");
5754 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5756 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
5763 #endif // #if VMA_STATS_STRING_ENABLED 5765 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5767 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5769 for(
size_t i = m_Blocks.size(); i--; )
5771 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5772 if(pBlock->m_pMappedData != VMA_NULL)
5774 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5775 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5776 pBlock->m_pMappedData = VMA_NULL;
5781 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5783 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5785 VkResult finalResult = VK_SUCCESS;
5786 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5788 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5789 if(pBlock->m_PersistentMap)
5791 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5792 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5793 m_hAllocator->m_hDevice,
5798 &pBlock->m_pMappedData);
5799 if(localResult != VK_SUCCESS)
5801 finalResult = localResult;
5808 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5809 VmaAllocator hAllocator,
5810 uint32_t currentFrameIndex)
5812 if(m_pDefragmentator == VMA_NULL)
5814 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5820 return m_pDefragmentator;
5823 VkResult VmaBlockVector::Defragment(
5825 VkDeviceSize& maxBytesToMove,
5826 uint32_t& maxAllocationsToMove)
5828 if(m_pDefragmentator == VMA_NULL)
5833 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5836 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5839 if(pDefragmentationStats != VMA_NULL)
5841 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5842 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
5845 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5846 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
5852 m_HasEmptyBlock =
false;
5853 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5855 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5856 if(pBlock->m_Metadata.IsEmpty())
5858 if(m_Blocks.size() > m_MinBlockCount)
5860 if(pDefragmentationStats != VMA_NULL)
5863 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
5866 VmaVectorRemove(m_Blocks, blockIndex);
5867 pBlock->Destroy(m_hAllocator);
5868 vma_delete(m_hAllocator, pBlock);
5872 m_HasEmptyBlock =
true;
5880 void VmaBlockVector::DestroyDefragmentator()
5882 if(m_pDefragmentator != VMA_NULL)
5884 vma_delete(m_hAllocator, m_pDefragmentator);
5885 m_pDefragmentator = VMA_NULL;
5889 void VmaBlockVector::MakePoolAllocationsLost(
5890 uint32_t currentFrameIndex,
5891 size_t* pLostAllocationCount)
5893 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5895 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5897 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5899 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
5903 void VmaBlockVector::AddStats(
VmaStats* pStats)
5905 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5906 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5908 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5910 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5912 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5914 VMA_HEAVY_ASSERT(pBlock->Validate());
5916 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
5917 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5918 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5919 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
5926 VmaDefragmentator::VmaDefragmentator(
5927 VmaAllocator hAllocator,
5928 VmaBlockVector* pBlockVector,
5929 uint32_t currentFrameIndex) :
5930 m_hAllocator(hAllocator),
5931 m_pBlockVector(pBlockVector),
5932 m_CurrentFrameIndex(currentFrameIndex),
5934 m_AllocationsMoved(0),
5935 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5936 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
5940 VmaDefragmentator::~VmaDefragmentator()
5942 for(
size_t i = m_Blocks.size(); i--; )
5944 vma_delete(m_hAllocator, m_Blocks[i]);
5948 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5950 AllocationInfo allocInfo;
5951 allocInfo.m_hAllocation = hAlloc;
5952 allocInfo.m_pChanged = pChanged;
5953 m_Allocations.push_back(allocInfo);
5956 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5959 if(m_pMappedDataForDefragmentation)
5961 *ppMappedData = m_pMappedDataForDefragmentation;
5966 if(m_pBlock->m_PersistentMap)
5968 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5969 *ppMappedData = m_pBlock->m_pMappedData;
5974 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5975 hAllocator->m_hDevice,
5976 m_pBlock->m_hMemory,
5980 &m_pMappedDataForDefragmentation);
5981 *ppMappedData = m_pMappedDataForDefragmentation;
5985 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5987 if(m_pMappedDataForDefragmentation != VMA_NULL)
5989 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
5993 VkResult VmaDefragmentator::DefragmentRound(
5994 VkDeviceSize maxBytesToMove,
5995 uint32_t maxAllocationsToMove)
5997 if(m_Blocks.empty())
6002 size_t srcBlockIndex = m_Blocks.size() - 1;
6003 size_t srcAllocIndex = SIZE_MAX;
6009 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6011 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6014 if(srcBlockIndex == 0)
6021 srcAllocIndex = SIZE_MAX;
6026 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6030 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6031 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6033 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6034 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6035 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6036 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6039 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6041 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6042 VmaAllocationRequest dstAllocRequest;
6043 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6044 m_CurrentFrameIndex,
6045 m_pBlockVector->GetFrameInUseCount(),
6046 m_pBlockVector->GetBufferImageGranularity(),
6051 &dstAllocRequest) &&
6053 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6055 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6058 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6059 (m_BytesMoved + size > maxBytesToMove))
6061 return VK_INCOMPLETE;
6064 void* pDstMappedData = VMA_NULL;
6065 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6066 if(res != VK_SUCCESS)
6071 void* pSrcMappedData = VMA_NULL;
6072 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6073 if(res != VK_SUCCESS)
6080 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6081 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6082 static_cast<size_t>(size));
6084 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6085 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6087 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6089 if(allocInfo.m_pChanged != VMA_NULL)
6091 *allocInfo.m_pChanged = VK_TRUE;
6094 ++m_AllocationsMoved;
6095 m_BytesMoved += size;
6097 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6105 if(srcAllocIndex > 0)
6111 if(srcBlockIndex > 0)
6114 srcAllocIndex = SIZE_MAX;
6124 VkResult VmaDefragmentator::Defragment(
6125 VkDeviceSize maxBytesToMove,
6126 uint32_t maxAllocationsToMove)
6128 if(m_Allocations.empty())
6134 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6135 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6137 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6138 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6139 m_Blocks.push_back(pBlockInfo);
6143 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6146 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6148 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6150 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6152 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6153 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6154 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6156 (*it)->m_Allocations.push_back(allocInfo);
6164 m_Allocations.clear();
6166 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6168 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6169 pBlockInfo->CalcHasNonMovableAllocations();
6170 pBlockInfo->SortAllocationsBySizeDescecnding();
6174 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6177 VkResult result = VK_SUCCESS;
6178 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6180 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6184 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6186 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6192 bool VmaDefragmentator::MoveMakesSense(
6193 size_t dstBlockIndex, VkDeviceSize dstOffset,
6194 size_t srcBlockIndex, VkDeviceSize srcOffset)
6196 if(dstBlockIndex < srcBlockIndex)
6200 if(dstBlockIndex > srcBlockIndex)
6204 if(dstOffset < srcOffset)
6217 m_PhysicalDevice(pCreateInfo->physicalDevice),
6218 m_hDevice(pCreateInfo->device),
6219 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6220 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6221 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6222 m_UnmapPersistentlyMappedMemoryCounter(0),
6223 m_PreferredLargeHeapBlockSize(0),
6224 m_PreferredSmallHeapBlockSize(0),
6225 m_CurrentFrameIndex(0),
6226 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6230 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6231 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6232 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6234 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6235 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
6237 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6239 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6250 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6251 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
6260 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6262 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6263 if(limit != VK_WHOLE_SIZE)
6265 m_HeapSizeLimit[heapIndex] = limit;
6266 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6268 m_MemProps.memoryHeaps[heapIndex].size = limit;
6274 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6276 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6278 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6280 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6283 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6287 GetBufferImageGranularity(),
6292 m_pDedicatedAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6297 VmaAllocator_T::~VmaAllocator_T()
6299 VMA_ASSERT(m_Pools.empty());
6301 for(
size_t i = GetMemoryTypeCount(); i--; )
6303 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6305 vma_delete(
this, m_pDedicatedAllocations[i][j]);
6306 vma_delete(
this, m_pBlockVectors[i][j]);
6311 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6313 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6314 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6315 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6316 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6317 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6318 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6319 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6320 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6321 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6322 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6323 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6324 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6325 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6326 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6327 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6330 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6332 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6333 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6335 if(pVulkanFunctions != VMA_NULL)
6337 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6338 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6339 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6340 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6341 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6342 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6343 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6344 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6345 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6346 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6347 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6348 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6349 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6350 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6351 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6352 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
6355 #undef VMA_COPY_IF_NOT_NULL 6359 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6360 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6361 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6362 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6363 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6364 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6365 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6366 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6367 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6368 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6369 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6370 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6371 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6372 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6373 if(m_UseKhrDedicatedAllocation)
6375 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6376 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6380 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6382 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6383 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6384 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6385 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6388 VkResult VmaAllocator_T::AllocateMemoryOfType(
6389 const VkMemoryRequirements& vkMemReq,
6390 bool dedicatedAllocation,
6391 VkBuffer dedicatedBuffer,
6392 VkImage dedicatedImage,
6394 uint32_t memTypeIndex,
6395 VmaSuballocationType suballocType,
6396 VmaAllocation* pAllocation)
6398 VMA_ASSERT(pAllocation != VMA_NULL);
6399 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6401 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6402 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6403 VMA_ASSERT(blockVector);
6407 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6408 bool preferDedicatedMemory =
6409 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6410 dedicatedAllocation ||
6412 vkMemReq.size > preferredBlockSize / 2;
6414 if(preferDedicatedMemory &&
6416 finalCreateInfo.
pool == VK_NULL_HANDLE)
6423 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6432 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6436 return AllocateDedicatedMemory(
6449 VkResult res = blockVector->Allocate(
6451 m_CurrentFrameIndex.load(),
6456 if(res == VK_SUCCESS)
6464 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6468 res = AllocateDedicatedMemory(
6473 finalCreateInfo.pUserData,
6477 if(res == VK_SUCCESS)
6480 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6486 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6493 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6495 VmaSuballocationType suballocType,
6496 uint32_t memTypeIndex,
6499 VkBuffer dedicatedBuffer,
6500 VkImage dedicatedImage,
6501 VmaAllocation* pAllocation)
6503 VMA_ASSERT(pAllocation);
6505 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6506 allocInfo.memoryTypeIndex = memTypeIndex;
6507 allocInfo.allocationSize = size;
6509 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6510 if(m_UseKhrDedicatedAllocation)
6512 if(dedicatedBuffer != VK_NULL_HANDLE)
6514 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
6515 dedicatedAllocInfo.buffer = dedicatedBuffer;
6516 allocInfo.pNext = &dedicatedAllocInfo;
6518 else if(dedicatedImage != VK_NULL_HANDLE)
6520 dedicatedAllocInfo.image = dedicatedImage;
6521 allocInfo.pNext = &dedicatedAllocInfo;
6526 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6527 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6530 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6534 void* pMappedData =
nullptr;
6537 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6539 res = (*m_VulkanFunctions.vkMapMemory)(
6548 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6549 FreeVulkanMemory(memTypeIndex, size, hMemory);
6555 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6556 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6560 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6561 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6562 VMA_ASSERT(pDedicatedAllocations);
6563 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
6566 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
6571 void VmaAllocator_T::GetBufferMemoryRequirements(
6573 VkMemoryRequirements& memReq,
6574 bool& requiresDedicatedAllocation,
6575 bool& prefersDedicatedAllocation)
const 6577 if(m_UseKhrDedicatedAllocation)
6579 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
6580 memReqInfo.buffer = hBuffer;
6582 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6584 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6585 memReq2.pNext = &memDedicatedReq;
6587 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6589 memReq = memReq2.memoryRequirements;
6590 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6591 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6595 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
6596 requiresDedicatedAllocation =
false;
6597 prefersDedicatedAllocation =
false;
6601 void VmaAllocator_T::GetImageMemoryRequirements(
6603 VkMemoryRequirements& memReq,
6604 bool& requiresDedicatedAllocation,
6605 bool& prefersDedicatedAllocation)
const 6607 if(m_UseKhrDedicatedAllocation)
6609 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
6610 memReqInfo.image = hImage;
6612 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
6614 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
6615 memReq2.pNext = &memDedicatedReq;
6617 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
6619 memReq = memReq2.memoryRequirements;
6620 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
6621 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
6625 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
6626 requiresDedicatedAllocation =
false;
6627 prefersDedicatedAllocation =
false;
6631 VkResult VmaAllocator_T::AllocateMemory(
6632 const VkMemoryRequirements& vkMemReq,
6633 bool requiresDedicatedAllocation,
6634 bool prefersDedicatedAllocation,
6635 VkBuffer dedicatedBuffer,
6636 VkImage dedicatedImage,
6638 VmaSuballocationType suballocType,
6639 VmaAllocation* pAllocation)
6644 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6645 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6647 if(requiresDedicatedAllocation)
6651 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
6652 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6654 if(createInfo.
pool != VK_NULL_HANDLE)
6656 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
6657 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6660 if((createInfo.
pool != VK_NULL_HANDLE) &&
6663 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
6664 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6667 if(createInfo.
pool != VK_NULL_HANDLE)
6669 return createInfo.
pool->m_BlockVector.Allocate(
6671 m_CurrentFrameIndex.load(),
6680 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6681 uint32_t memTypeIndex = UINT32_MAX;
6683 if(res == VK_SUCCESS)
6685 res = AllocateMemoryOfType(
6687 requiresDedicatedAllocation || prefersDedicatedAllocation,
6695 if(res == VK_SUCCESS)
6705 memoryTypeBits &= ~(1u << memTypeIndex);
6708 if(res == VK_SUCCESS)
6710 res = AllocateMemoryOfType(
6712 requiresDedicatedAllocation || prefersDedicatedAllocation,
6720 if(res == VK_SUCCESS)
6730 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6741 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6743 VMA_ASSERT(allocation);
6745 if(allocation->CanBecomeLost() ==
false ||
6746 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6748 switch(allocation->GetType())
6750 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6752 VmaBlockVector* pBlockVector = VMA_NULL;
6753 VmaPool hPool = allocation->GetPool();
6754 if(hPool != VK_NULL_HANDLE)
6756 pBlockVector = &hPool->m_BlockVector;
6760 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6761 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6762 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6764 pBlockVector->Free(allocation);
6767 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
6768 FreeDedicatedMemory(allocation);
6775 vma_delete(
this, allocation);
6778 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6781 InitStatInfo(pStats->
total);
6782 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6784 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6788 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6790 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6791 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6793 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6794 VMA_ASSERT(pBlockVector);
6795 pBlockVector->AddStats(pStats);
6801 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6802 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6804 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6809 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6811 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6812 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6813 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6815 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
6816 VMA_ASSERT(pDedicatedAllocVector);
6817 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6820 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
6821 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6822 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6823 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6829 VmaPostprocessCalcStatInfo(pStats->
total);
6830 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6831 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6832 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6833 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6836 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6838 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6840 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6842 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6844 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6846 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6847 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6848 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6852 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6853 AllocationVectorType* pDedicatedAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6854 for(
size_t dedicatedAllocIndex = pDedicatedAllocationsVector->size(); dedicatedAllocIndex--; )
6856 VmaAllocation hAlloc = (*pDedicatedAllocationsVector)[dedicatedAllocIndex];
6857 hAlloc->DedicatedAllocUnmapPersistentlyMappedMemory(
this);
6863 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6864 pBlockVector->UnmapPersistentlyMappedMemory();
6871 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6872 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6874 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6881 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6883 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6884 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6886 VkResult finalResult = VK_SUCCESS;
6887 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6891 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6892 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6894 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6898 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6900 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6901 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6902 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6906 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
6907 AllocationVectorType* pAllocationsVector = m_pDedicatedAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6908 for(
size_t dedicatedAllocIndex = 0, dedicatedAllocCount = pAllocationsVector->size(); dedicatedAllocIndex < dedicatedAllocCount; ++dedicatedAllocIndex)
6910 VmaAllocation hAlloc = (*pAllocationsVector)[dedicatedAllocIndex];
6911 hAlloc->DedicatedAllocMapPersistentlyMappedMemory(
this);
6917 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6918 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6919 if(localResult != VK_SUCCESS)
6921 finalResult = localResult;
6933 VkResult VmaAllocator_T::Defragment(
6934 VmaAllocation* pAllocations,
6935 size_t allocationCount,
6936 VkBool32* pAllocationsChanged,
6940 if(pAllocationsChanged != VMA_NULL)
6942 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6944 if(pDefragmentationStats != VMA_NULL)
6946 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6949 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6951 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6952 return VK_ERROR_MEMORY_MAP_FAILED;
6955 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6957 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6959 const size_t poolCount = m_Pools.size();
6962 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6964 VmaAllocation hAlloc = pAllocations[allocIndex];
6966 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6968 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6970 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6972 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6974 VmaBlockVector* pAllocBlockVector =
nullptr;
6976 const VmaPool hAllocPool = hAlloc->GetPool();
6978 if(hAllocPool != VK_NULL_HANDLE)
6980 pAllocBlockVector = &hAllocPool->GetBlockVector();
6985 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6988 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6990 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6991 &pAllocationsChanged[allocIndex] : VMA_NULL;
6992 pDefragmentator->AddAllocation(hAlloc, pChanged);
6996 VkResult result = VK_SUCCESS;
7000 VkDeviceSize maxBytesToMove = SIZE_MAX;
7001 uint32_t maxAllocationsToMove = UINT32_MAX;
7002 if(pDefragmentationInfo != VMA_NULL)
7009 for(uint32_t memTypeIndex = 0;
7010 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7014 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7016 for(uint32_t blockVectorType = 0;
7017 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
7020 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
7021 pDefragmentationStats,
7023 maxAllocationsToMove);
7029 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7031 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7032 pDefragmentationStats,
7034 maxAllocationsToMove);
7040 for(
size_t poolIndex = poolCount; poolIndex--; )
7042 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7046 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7048 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7050 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
7052 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
7060 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7062 if(hAllocation->CanBecomeLost())
7068 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7069 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7072 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7076 pAllocationInfo->
offset = 0;
7077 pAllocationInfo->
size = hAllocation->GetSize();
7079 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7082 else if(localLastUseFrameIndex == localCurrFrameIndex)
7084 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7085 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7086 pAllocationInfo->
offset = hAllocation->GetOffset();
7087 pAllocationInfo->
size = hAllocation->GetSize();
7088 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7089 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7094 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7096 localLastUseFrameIndex = localCurrFrameIndex;
7104 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7105 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7106 pAllocationInfo->
offset = hAllocation->GetOffset();
7107 pAllocationInfo->
size = hAllocation->GetSize();
7108 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7109 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7113 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7115 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7128 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7130 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7131 if(res != VK_SUCCESS)
7133 vma_delete(
this, *pPool);
7140 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7141 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7147 void VmaAllocator_T::DestroyPool(VmaPool pool)
7151 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7152 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7153 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7156 vma_delete(
this, pool);
7159 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7161 pool->m_BlockVector.GetPoolStats(pPoolStats);
7164 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7166 m_CurrentFrameIndex.store(frameIndex);
7169 void VmaAllocator_T::MakePoolAllocationsLost(
7171 size_t* pLostAllocationCount)
7173 hPool->m_BlockVector.MakePoolAllocationsLost(
7174 m_CurrentFrameIndex.load(),
7175 pLostAllocationCount);
7178 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7180 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
7181 (*pAllocation)->InitLost();
7184 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7186 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7189 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7191 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7192 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7194 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7195 if(res == VK_SUCCESS)
7197 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7202 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7207 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7210 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7212 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7218 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7220 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7222 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7225 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7227 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7228 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7230 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7231 m_HeapSizeLimit[heapIndex] += size;
7235 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7237 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7239 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7241 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7242 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex][allocation->GetBlockVectorType()];
7243 VMA_ASSERT(pDedicatedAllocations);
7244 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7245 VMA_ASSERT(success);
7248 VkDeviceMemory hMemory = allocation->GetMemory();
7250 if(allocation->GetMappedData() != VMA_NULL)
7252 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7255 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7257 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
7260 #if VMA_STATS_STRING_ENABLED 7262 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7264 bool dedicatedAllocationsStarted =
false;
7265 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7267 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7268 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7270 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex][blockVectorType];
7271 VMA_ASSERT(pDedicatedAllocVector);
7272 if(pDedicatedAllocVector->empty() ==
false)
7274 if(dedicatedAllocationsStarted ==
false)
7276 dedicatedAllocationsStarted =
true;
7277 json.WriteString(
"DedicatedAllocations");
7281 json.BeginString(
"Type ");
7282 json.ContinueString(memTypeIndex);
7283 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7285 json.ContinueString(
" Mapped");
7291 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7293 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7294 json.BeginObject(
true);
7296 json.WriteString(
"Size");
7297 json.WriteNumber(hAlloc->GetSize());
7299 json.WriteString(
"Type");
7300 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7309 if(dedicatedAllocationsStarted)
7315 bool allocationsStarted =
false;
7316 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7318 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7320 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7322 if(allocationsStarted ==
false)
7324 allocationsStarted =
true;
7325 json.WriteString(
"DefaultPools");
7329 json.BeginString(
"Type ");
7330 json.ContinueString(memTypeIndex);
7331 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7333 json.ContinueString(
" Mapped");
7337 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7341 if(allocationsStarted)
7348 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7349 const size_t poolCount = m_Pools.size();
7352 json.WriteString(
"Pools");
7354 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7356 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7363 #endif // #if VMA_STATS_STRING_ENABLED 7365 static VkResult AllocateMemoryForImage(
7366 VmaAllocator allocator,
7369 VmaSuballocationType suballocType,
7370 VmaAllocation* pAllocation)
7372 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7374 VkMemoryRequirements vkMemReq = {};
7375 bool requiresDedicatedAllocation =
false;
7376 bool prefersDedicatedAllocation =
false;
7377 allocator->GetImageMemoryRequirements(image, vkMemReq,
7378 requiresDedicatedAllocation, prefersDedicatedAllocation);
7380 return allocator->AllocateMemory(
7382 requiresDedicatedAllocation,
7383 prefersDedicatedAllocation,
7386 *pAllocationCreateInfo,
7396 VmaAllocator* pAllocator)
7398 VMA_ASSERT(pCreateInfo && pAllocator);
7399 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7405 VmaAllocator allocator)
7407 if(allocator != VK_NULL_HANDLE)
7409 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7410 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7411 vma_delete(&allocationCallbacks, allocator);
7416 VmaAllocator allocator,
7417 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7419 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7420 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7424 VmaAllocator allocator,
7425 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7427 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7428 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7432 VmaAllocator allocator,
7433 uint32_t memoryTypeIndex,
7434 VkMemoryPropertyFlags* pFlags)
7436 VMA_ASSERT(allocator && pFlags);
7437 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7438 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7442 VmaAllocator allocator,
7443 uint32_t frameIndex)
7445 VMA_ASSERT(allocator);
7446 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7448 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7450 allocator->SetCurrentFrameIndex(frameIndex);
7454 VmaAllocator allocator,
7457 VMA_ASSERT(allocator && pStats);
7458 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7459 allocator->CalculateStats(pStats);
7462 #if VMA_STATS_STRING_ENABLED 7465 VmaAllocator allocator,
7466 char** ppStatsString,
7467 VkBool32 detailedMap)
7469 VMA_ASSERT(allocator && ppStatsString);
7470 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7472 VmaStringBuilder sb(allocator);
7474 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7478 allocator->CalculateStats(&stats);
7480 json.WriteString(
"Total");
7481 VmaPrintStatInfo(json, stats.
total);
7483 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7485 json.BeginString(
"Heap ");
7486 json.ContinueString(heapIndex);
7490 json.WriteString(
"Size");
7491 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7493 json.WriteString(
"Flags");
7494 json.BeginArray(
true);
7495 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7497 json.WriteString(
"DEVICE_LOCAL");
7503 json.WriteString(
"Stats");
7504 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7507 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7509 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7511 json.BeginString(
"Type ");
7512 json.ContinueString(typeIndex);
7517 json.WriteString(
"Flags");
7518 json.BeginArray(
true);
7519 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7520 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7522 json.WriteString(
"DEVICE_LOCAL");
7524 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7526 json.WriteString(
"HOST_VISIBLE");
7528 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7530 json.WriteString(
"HOST_COHERENT");
7532 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7534 json.WriteString(
"HOST_CACHED");
7536 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7538 json.WriteString(
"LAZILY_ALLOCATED");
7544 json.WriteString(
"Stats");
7545 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7554 if(detailedMap == VK_TRUE)
7556 allocator->PrintDetailedMap(json);
7562 const size_t len = sb.GetLength();
7563 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7566 memcpy(pChars, sb.GetData(), len);
7569 *ppStatsString = pChars;
7573 VmaAllocator allocator,
7576 if(pStatsString != VMA_NULL)
7578 VMA_ASSERT(allocator);
7579 size_t len = strlen(pStatsString);
7580 vma_delete_array(allocator, pStatsString, len + 1);
7584 #endif // #if VMA_STATS_STRING_ENABLED 7589 VmaAllocator allocator,
7590 uint32_t memoryTypeBits,
7592 uint32_t* pMemoryTypeIndex)
7594 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7595 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7596 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7598 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7600 if(preferredFlags == 0)
7602 preferredFlags = requiredFlags;
7605 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7608 switch(pAllocationCreateInfo->
usage)
7613 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7616 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7619 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7620 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7623 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7624 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7630 *pMemoryTypeIndex = UINT32_MAX;
7631 uint32_t minCost = UINT32_MAX;
7632 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7633 memTypeIndex < allocator->GetMemoryTypeCount();
7634 ++memTypeIndex, memTypeBit <<= 1)
7637 if((memTypeBit & memoryTypeBits) != 0)
7639 const VkMemoryPropertyFlags currFlags =
7640 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7642 if((requiredFlags & ~currFlags) == 0)
7645 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7647 if(currCost < minCost)
7649 *pMemoryTypeIndex = memTypeIndex;
7659 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7663 VmaAllocator allocator,
7667 VMA_ASSERT(allocator && pCreateInfo && pPool);
7669 VMA_DEBUG_LOG(
"vmaCreatePool");
7671 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7673 return allocator->CreatePool(pCreateInfo, pPool);
7677 VmaAllocator allocator,
7680 VMA_ASSERT(allocator && pool);
7682 VMA_DEBUG_LOG(
"vmaDestroyPool");
7684 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7686 allocator->DestroyPool(pool);
7690 VmaAllocator allocator,
7694 VMA_ASSERT(allocator && pool && pPoolStats);
7696 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7698 allocator->GetPoolStats(pool, pPoolStats);
7702 VmaAllocator allocator,
7704 size_t* pLostAllocationCount)
7706 VMA_ASSERT(allocator && pool);
7708 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7710 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7714 VmaAllocator allocator,
7715 const VkMemoryRequirements* pVkMemoryRequirements,
7717 VmaAllocation* pAllocation,
7720 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7722 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7724 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7726 VkResult result = allocator->AllocateMemory(
7727 *pVkMemoryRequirements,
7733 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7736 if(pAllocationInfo && result == VK_SUCCESS)
7738 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7745 VmaAllocator allocator,
7748 VmaAllocation* pAllocation,
7751 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7753 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7755 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7757 VkMemoryRequirements vkMemReq = {};
7758 bool requiresDedicatedAllocation =
false;
7759 bool prefersDedicatedAllocation =
false;
7760 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
7761 requiresDedicatedAllocation,
7762 prefersDedicatedAllocation);
7764 VkResult result = allocator->AllocateMemory(
7766 requiresDedicatedAllocation,
7767 prefersDedicatedAllocation,
7771 VMA_SUBALLOCATION_TYPE_BUFFER,
7774 if(pAllocationInfo && result == VK_SUCCESS)
7776 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7783 VmaAllocator allocator,
7786 VmaAllocation* pAllocation,
7789 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7791 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7793 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7795 VkResult result = AllocateMemoryForImage(
7799 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7802 if(pAllocationInfo && result == VK_SUCCESS)
7804 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7811 VmaAllocator allocator,
7812 VmaAllocation allocation)
7814 VMA_ASSERT(allocator && allocation);
7816 VMA_DEBUG_LOG(
"vmaFreeMemory");
7818 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7820 allocator->FreeMemory(allocation);
7824 VmaAllocator allocator,
7825 VmaAllocation allocation,
7828 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7830 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7832 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7836 VmaAllocator allocator,
7837 VmaAllocation allocation,
7840 VMA_ASSERT(allocator && allocation);
7842 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7844 allocation->SetUserData(pUserData);
7848 VmaAllocator allocator,
7849 VmaAllocation* pAllocation)
7851 VMA_ASSERT(allocator && pAllocation);
7853 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7855 allocator->CreateLostAllocation(pAllocation);
7859 VmaAllocator allocator,
7860 VmaAllocation allocation,
7863 VMA_ASSERT(allocator && allocation && ppData);
7865 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7867 return (*allocator->GetVulkanFunctions().vkMapMemory)(
7868 allocator->m_hDevice,
7869 allocation->GetMemory(),
7870 allocation->GetOffset(),
7871 allocation->GetSize(),
7877 VmaAllocator allocator,
7878 VmaAllocation allocation)
7880 VMA_ASSERT(allocator && allocation);
7882 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7884 (*allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, allocation->GetMemory());
7889 VMA_ASSERT(allocator);
7891 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7893 allocator->UnmapPersistentlyMappedMemory();
7898 VMA_ASSERT(allocator);
7900 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7902 return allocator->MapPersistentlyMappedMemory();
7906 VmaAllocator allocator,
7907 VmaAllocation* pAllocations,
7908 size_t allocationCount,
7909 VkBool32* pAllocationsChanged,
7913 VMA_ASSERT(allocator && pAllocations);
7915 VMA_DEBUG_LOG(
"vmaDefragment");
7917 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7919 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7923 VmaAllocator allocator,
7924 const VkBufferCreateInfo* pBufferCreateInfo,
7927 VmaAllocation* pAllocation,
7930 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7932 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7934 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7936 *pBuffer = VK_NULL_HANDLE;
7937 *pAllocation = VK_NULL_HANDLE;
7940 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7941 allocator->m_hDevice,
7943 allocator->GetAllocationCallbacks(),
7948 VkMemoryRequirements vkMemReq = {};
7949 bool requiresDedicatedAllocation =
false;
7950 bool prefersDedicatedAllocation =
false;
7951 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
7952 requiresDedicatedAllocation, prefersDedicatedAllocation);
7955 res = allocator->AllocateMemory(
7957 requiresDedicatedAllocation,
7958 prefersDedicatedAllocation,
7961 *pAllocationCreateInfo,
7962 VMA_SUBALLOCATION_TYPE_BUFFER,
7967 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7968 allocator->m_hDevice,
7970 (*pAllocation)->GetMemory(),
7971 (*pAllocation)->GetOffset());
7975 if(pAllocationInfo != VMA_NULL)
7977 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7981 allocator->FreeMemory(*pAllocation);
7982 *pAllocation = VK_NULL_HANDLE;
7985 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7986 *pBuffer = VK_NULL_HANDLE;
7993 VmaAllocator allocator,
7995 VmaAllocation allocation)
7997 if(buffer != VK_NULL_HANDLE)
7999 VMA_ASSERT(allocator);
8001 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8003 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8005 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8007 allocator->FreeMemory(allocation);
8012 VmaAllocator allocator,
8013 const VkImageCreateInfo* pImageCreateInfo,
8016 VmaAllocation* pAllocation,
8019 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8021 VMA_DEBUG_LOG(
"vmaCreateImage");
8023 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8025 *pImage = VK_NULL_HANDLE;
8026 *pAllocation = VK_NULL_HANDLE;
8029 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8030 allocator->m_hDevice,
8032 allocator->GetAllocationCallbacks(),
8036 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8037 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8038 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8041 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8045 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8046 allocator->m_hDevice,
8048 (*pAllocation)->GetMemory(),
8049 (*pAllocation)->GetOffset());
8053 if(pAllocationInfo != VMA_NULL)
8055 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8059 allocator->FreeMemory(*pAllocation);
8060 *pAllocation = VK_NULL_HANDLE;
8063 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8064 *pImage = VK_NULL_HANDLE;
8071 VmaAllocator allocator,
8073 VmaAllocation allocation)
8075 if(image != VK_NULL_HANDLE)
8077 VMA_ASSERT(allocator);
8079 VMA_DEBUG_LOG(
"vmaDestroyImage");
8081 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8083 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8085 allocator->FreeMemory(allocation);
8089 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:486
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:703
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:511
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:496
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:677
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:490
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:962
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:508
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1115
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:832
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:886
Definition: vk_mem_alloc.h:741
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:479
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:774
Definition: vk_mem_alloc.h:687
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:523
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:570
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:505
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:520
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:691
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:635
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:493
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:634
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:501
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1119
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:540
VmaStatInfo total
Definition: vk_mem_alloc.h:644
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1127
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:757
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1110
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:494
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:415
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:514
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:840
Definition: vk_mem_alloc.h:834
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:972
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:491
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:776
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:856
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:892
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:477
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:843
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:672
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1105
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1123
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:683
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:492
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:640
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:421
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:442
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:447
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1125
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:768
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:902
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:487
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:623
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:851
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:434
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:748
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:636
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:438
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:846
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:686
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:763
Definition: vk_mem_alloc.h:754
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:626
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:489
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:864
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:526
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:895
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:752
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:781
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:558
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:642
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:635
Definition: vk_mem_alloc.h:814
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:498
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:436
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:497
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:878
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:983
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:517
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:635
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:632
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:883
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:967
Definition: vk_mem_alloc.h:750
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1121
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:485
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:500
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:630
No intended memory usage specified. Use other members of VmaAllocationCreateInfo to specify your requ...
Definition: vk_mem_alloc.h:675
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:836
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:628
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:495
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:499
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:714
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:680
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:978
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:475
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:488
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:948
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:730
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:805
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:636
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:643
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:889
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:636
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:953