23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 387 #include <vulkan/vulkan.h> 394 VK_DEFINE_HANDLE(VmaAllocator)
398 VmaAllocator allocator,
400 VkDeviceMemory memory,
404 VmaAllocator allocator,
406 VkDeviceMemory memory,
522 VmaAllocator* pAllocator);
526 VmaAllocator allocator);
533 VmaAllocator allocator,
534 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
541 VmaAllocator allocator,
542 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
551 VmaAllocator allocator,
552 uint32_t memoryTypeIndex,
553 VkMemoryPropertyFlags* pFlags);
564 VmaAllocator allocator,
565 uint32_t frameIndex);
593 VmaAllocator allocator,
596 #define VMA_STATS_STRING_ENABLED 1 598 #if VMA_STATS_STRING_ENABLED 604 VmaAllocator allocator,
605 char** ppStatsString,
606 VkBool32 detailedMap);
609 VmaAllocator allocator,
612 #endif // #if VMA_STATS_STRING_ENABLED 621 VK_DEFINE_HANDLE(VmaPool)
744 VmaAllocator allocator,
745 uint32_t memoryTypeBits,
747 uint32_t* pMemoryTypeIndex);
864 VmaAllocator allocator,
871 VmaAllocator allocator,
881 VmaAllocator allocator,
892 VmaAllocator allocator,
894 size_t* pLostAllocationCount);
896 VK_DEFINE_HANDLE(VmaAllocation)
949 VmaAllocator allocator,
950 const VkMemoryRequirements* pVkMemoryRequirements,
952 VmaAllocation* pAllocation,
962 VmaAllocator allocator,
965 VmaAllocation* pAllocation,
970 VmaAllocator allocator,
973 VmaAllocation* pAllocation,
978 VmaAllocator allocator,
979 VmaAllocation allocation);
983 VmaAllocator allocator,
984 VmaAllocation allocation,
989 VmaAllocator allocator,
990 VmaAllocation allocation,
1004 VmaAllocator allocator,
1005 VmaAllocation* pAllocation);
1016 VmaAllocator allocator,
1017 VmaAllocation allocation,
1021 VmaAllocator allocator,
1022 VmaAllocation allocation);
1153 VmaAllocator allocator,
1154 VmaAllocation* pAllocations,
1155 size_t allocationCount,
1156 VkBool32* pAllocationsChanged,
1186 VmaAllocator allocator,
1187 const VkBufferCreateInfo* pBufferCreateInfo,
1190 VmaAllocation* pAllocation,
1202 VmaAllocator allocator,
1204 VmaAllocation allocation);
1208 VmaAllocator allocator,
1209 const VkImageCreateInfo* pImageCreateInfo,
1212 VmaAllocation* pAllocation,
1224 VmaAllocator allocator,
1226 VmaAllocation allocation);
1230 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1233 #ifdef __INTELLISENSE__ 1234 #define VMA_IMPLEMENTATION 1237 #ifdef VMA_IMPLEMENTATION 1238 #undef VMA_IMPLEMENTATION 1260 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1261 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1273 #if VMA_USE_STL_CONTAINERS 1274 #define VMA_USE_STL_VECTOR 1 1275 #define VMA_USE_STL_UNORDERED_MAP 1 1276 #define VMA_USE_STL_LIST 1 1279 #if VMA_USE_STL_VECTOR 1283 #if VMA_USE_STL_UNORDERED_MAP 1284 #include <unordered_map> 1287 #if VMA_USE_STL_LIST 1296 #include <algorithm> 1300 #if !defined(_WIN32) 1307 #define VMA_ASSERT(expr) assert(expr) 1309 #define VMA_ASSERT(expr) 1315 #ifndef VMA_HEAVY_ASSERT 1317 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1319 #define VMA_HEAVY_ASSERT(expr) 1325 #define VMA_NULL nullptr 1328 #ifndef VMA_ALIGN_OF 1329 #define VMA_ALIGN_OF(type) (__alignof(type)) 1332 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1334 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1336 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1340 #ifndef VMA_SYSTEM_FREE 1342 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1344 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1349 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1353 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1357 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1361 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1364 #ifndef VMA_DEBUG_LOG 1365 #define VMA_DEBUG_LOG(format, ...) 1375 #if VMA_STATS_STRING_ENABLED 1376 static inline void VmaUint32ToStr(
char* outStr, size_t strLen, uint32_t num)
{
    // Writes num as decimal text into outStr; snprintf bounds the write to
    // strLen and NUL-terminates when strLen > 0.
    // NOTE(review): restored the braces dropped from this block.
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
// Writes num as decimal text into outStr; snprintf bounds the write to
// strLen and NUL-terminates when strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr using the implementation-defined "%p"
// representation; bounded by strLen, NUL-terminated when strLen > 0.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1396 void Lock() { m_Mutex.lock(); }
1397 void Unlock() { m_Mutex.unlock(); }
1401 #define VMA_MUTEX VmaMutex 1412 #ifndef VMA_ATOMIC_UINT32 1413 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1416 #ifndef VMA_BEST_FIT 1429 #define VMA_BEST_FIT (1) 1432 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1437 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1440 #ifndef VMA_DEBUG_ALIGNMENT 1445 #define VMA_DEBUG_ALIGNMENT (1) 1448 #ifndef VMA_DEBUG_MARGIN 1453 #define VMA_DEBUG_MARGIN (0) 1456 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1461 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1464 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1469 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1472 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1473 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1477 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1478 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1482 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1483 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1487 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1493 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1494 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (branch-free SWAR popcount).
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >>  2) & 0x33333333) + (c & 0x33333333);
    c = ((c >>  4) + c) & 0x0F0F0F0F;
    c = ((c >>  8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    // NOTE(review): restored the return statement missing from this block;
    // without it the function falls off the end (UB when the result is used).
    return c;
}
// Rounds val up to the nearest multiple of align. Uses division, so it is
// correct for any positive align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped - (bumped % align);
}
// Integer division of x by y with rounding to nearest: (x + y/2) / y.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition using the last element (*(end-1)) as the pivot.
// Elements for which cmp(elem, pivot) holds are moved before the pivot.
// Returns an iterator to the pivot's final position.
// NOTE(review): restored `++insertIndex` and the final return statement
// dropped from this block; without them the partition index never advances
// and the function has no return value.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1547 template<
typename Iterator,
typename Compare>
1548 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1552 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1553 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1554 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1558 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1560 #endif // #ifndef VMA_SORT 1569 static inline bool VmaBlocksOnSamePage(
1570 VkDeviceSize resourceAOffset,
1571 VkDeviceSize resourceASize,
1572 VkDeviceSize resourceBOffset,
1573 VkDeviceSize pageSize)
1575 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1576 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1577 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1578 VkDeviceSize resourceBStart = resourceBOffset;
1579 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1580 return resourceAEndPage == resourceBStartPage;
// Kind of data occupying a suballocation within a device memory block.
// The numeric order matters: VmaIsBufferImageGranularityConflict compares
// these values with `<` / `>` to normalize its arguments.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1600 static inline bool VmaIsBufferImageGranularityConflict(
1601 VmaSuballocationType suballocType1,
1602 VmaSuballocationType suballocType2)
1604 if(suballocType1 > suballocType2)
1606 VMA_SWAP(suballocType1, suballocType2);
1609 switch(suballocType1)
1611 case VMA_SUBALLOCATION_TYPE_FREE:
1613 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1615 case VMA_SUBALLOCATION_TYPE_BUFFER:
1617 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1618 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1619 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1621 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1622 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1623 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1624 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1626 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1627 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1639 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1640 m_pMutex(useMutex ? &mutex : VMA_NULL)
1657 VMA_MUTEX* m_pMutex;
1660 #if VMA_DEBUG_GLOBAL_MUTEX 1661 static VMA_MUTEX gDebugGlobalMutex;
1662 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1664 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1668 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over [beg, end): returns an iterator to the first element
// that is NOT less than `key` according to comparator `cmp`
// (std::lower_bound semantics). Returns `end` if no such element exists.
// NOTE(review): restored the while-loop header, the else branch, and the
// final return statement dropped from this block.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
1701 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1703 if((pAllocationCallbacks != VMA_NULL) &&
1704 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1706 return (*pAllocationCallbacks->pfnAllocation)(
1707 pAllocationCallbacks->pUserData,
1710 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1714 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1718 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1720 if((pAllocationCallbacks != VMA_NULL) &&
1721 (pAllocationCallbacks->pfnFree != VMA_NULL))
1723 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1727 VMA_SYSTEM_FREE(ptr);
1731 template<
typename T>
1732 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1734 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1737 template<
typename T>
1738 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1740 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1743 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1745 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1747 template<
typename T>
1748 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1751 VmaFree(pAllocationCallbacks, ptr);
1754 template<
typename T>
1755 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1759 for(
size_t i = count; i--; )
1763 VmaFree(pAllocationCallbacks, ptr);
1768 template<
typename T>
1769 class VmaStlAllocator
1772 const VkAllocationCallbacks*
const m_pCallbacks;
1773 typedef T value_type;
1775 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1776 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1778 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1779 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1781 template<
typename U>
1782 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1784 return m_pCallbacks == rhs.m_pCallbacks;
1786 template<
typename U>
1787 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1789 return m_pCallbacks != rhs.m_pCallbacks;
1792 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1795 #if VMA_USE_STL_VECTOR 1797 #define VmaVector std::vector 1799 template<
typename T,
typename allocatorT>
1800 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1802 vec.insert(vec.begin() + index, item);
1805 template<
typename T,
typename allocatorT>
1806 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1808 vec.erase(vec.begin() + index);
1811 #else // #if VMA_USE_STL_VECTOR 1816 template<
typename T,
typename AllocatorT>
1820 typedef T value_type;
1822 VmaVector(
const AllocatorT& allocator) :
1823 m_Allocator(allocator),
1830 VmaVector(
size_t count,
const AllocatorT& allocator) :
1831 m_Allocator(allocator),
1832 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1838 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1839 m_Allocator(src.m_Allocator),
1840 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1841 m_Count(src.m_Count),
1842 m_Capacity(src.m_Count)
1846 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1852 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1855 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1859 resize(rhs.m_Count);
1862 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1868 bool empty()
const {
return m_Count == 0; }
1869 size_t size()
const {
return m_Count; }
1870 T* data() {
return m_pArray; }
1871 const T* data()
const {
return m_pArray; }
1873 T& operator[](
size_t index)
1875 VMA_HEAVY_ASSERT(index < m_Count);
1876 return m_pArray[index];
1878 const T& operator[](
size_t index)
const 1880 VMA_HEAVY_ASSERT(index < m_Count);
1881 return m_pArray[index];
1886 VMA_HEAVY_ASSERT(m_Count > 0);
1889 const T& front()
const 1891 VMA_HEAVY_ASSERT(m_Count > 0);
1896 VMA_HEAVY_ASSERT(m_Count > 0);
1897 return m_pArray[m_Count - 1];
1899 const T& back()
const 1901 VMA_HEAVY_ASSERT(m_Count > 0);
1902 return m_pArray[m_Count - 1];
1905 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1907 newCapacity = VMA_MAX(newCapacity, m_Count);
1909 if((newCapacity < m_Capacity) && !freeMemory)
1911 newCapacity = m_Capacity;
1914 if(newCapacity != m_Capacity)
1916 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1919 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1921 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1922 m_Capacity = newCapacity;
1923 m_pArray = newArray;
1927 void resize(
size_t newCount,
bool freeMemory =
false)
1929 size_t newCapacity = m_Capacity;
1930 if(newCount > m_Capacity)
1932 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1936 newCapacity = newCount;
1939 if(newCapacity != m_Capacity)
1941 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1942 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1943 if(elementsToCopy != 0)
1945 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1947 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1948 m_Capacity = newCapacity;
1949 m_pArray = newArray;
1955 void clear(
bool freeMemory =
false)
1957 resize(0, freeMemory);
1960 void insert(
size_t index,
const T& src)
1962 VMA_HEAVY_ASSERT(index <= m_Count);
1963 const size_t oldCount = size();
1964 resize(oldCount + 1);
1965 if(index < oldCount)
1967 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1969 m_pArray[index] = src;
1972 void remove(
size_t index)
1974 VMA_HEAVY_ASSERT(index < m_Count);
1975 const size_t oldCount = size();
1976 if(index < oldCount - 1)
1978 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1980 resize(oldCount - 1);
1983 void push_back(
const T& src)
1985 const size_t newIndex = size();
1986 resize(newIndex + 1);
1987 m_pArray[newIndex] = src;
1992 VMA_HEAVY_ASSERT(m_Count > 0);
1996 void push_front(
const T& src)
2003 VMA_HEAVY_ASSERT(m_Count > 0);
2007 typedef T* iterator;
2009 iterator begin() {
return m_pArray; }
2010 iterator end() {
return m_pArray + m_Count; }
2013 AllocatorT m_Allocator;
2019 template<
typename T,
typename allocatorT>
2020 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2022 vec.insert(index, item);
2025 template<
typename T,
typename allocatorT>
2026 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2031 #endif // #if VMA_USE_STL_VECTOR 2033 template<
typename CmpLess,
typename VectorT>
2034 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2036 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2038 vector.data() + vector.size(),
2040 CmpLess()) - vector.data();
2041 VmaVectorInsert(vector, indexToInsert, value);
2042 return indexToInsert;
2045 template<
typename CmpLess,
typename VectorT>
2046 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2049 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2054 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2056 size_t indexToRemove = it - vector.begin();
2057 VmaVectorRemove(vector, indexToRemove);
2063 template<
typename CmpLess,
typename VectorT>
2064 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2067 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2069 vector.data() + vector.size(),
2072 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2074 return it - vector.begin();
2078 return vector.size();
2090 template<
typename T>
2091 class VmaPoolAllocator
2094 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2095 ~VmaPoolAllocator();
2103 uint32_t NextFreeIndex;
2110 uint32_t FirstFreeIndex;
2113 const VkAllocationCallbacks* m_pAllocationCallbacks;
2114 size_t m_ItemsPerBlock;
2115 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2117 ItemBlock& CreateNewBlock();
2120 template<
typename T>
2121 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2122 m_pAllocationCallbacks(pAllocationCallbacks),
2123 m_ItemsPerBlock(itemsPerBlock),
2124 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2126 VMA_ASSERT(itemsPerBlock > 0);
2129 template<
typename T>
2130 VmaPoolAllocator<T>::~VmaPoolAllocator()
2135 template<
typename T>
2136 void VmaPoolAllocator<T>::Clear()
2138 for(
size_t i = m_ItemBlocks.size(); i--; )
2139 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2140 m_ItemBlocks.clear();
2143 template<
typename T>
2144 T* VmaPoolAllocator<T>::Alloc()
2146 for(
size_t i = m_ItemBlocks.size(); i--; )
2148 ItemBlock& block = m_ItemBlocks[i];
2150 if(block.FirstFreeIndex != UINT32_MAX)
2152 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2153 block.FirstFreeIndex = pItem->NextFreeIndex;
2154 return &pItem->Value;
2159 ItemBlock& newBlock = CreateNewBlock();
2160 Item*
const pItem = &newBlock.pItems[0];
2161 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2162 return &pItem->Value;
2165 template<
typename T>
2166 void VmaPoolAllocator<T>::Free(T* ptr)
2169 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2171 ItemBlock& block = m_ItemBlocks[i];
2175 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2178 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2180 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2181 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2182 block.FirstFreeIndex = index;
2186 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2189 template<
typename T>
2190 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2192 ItemBlock newBlock = {
2193 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2195 m_ItemBlocks.push_back(newBlock);
2198 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2199 newBlock.pItems[i].NextFreeIndex = i + 1;
2200 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2201 return m_ItemBlocks.back();
2207 #if VMA_USE_STL_LIST 2209 #define VmaList std::list 2211 #else // #if VMA_USE_STL_LIST 2213 template<
typename T>
2222 template<
typename T>
2226 typedef VmaListItem<T> ItemType;
2228 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2232 size_t GetCount()
const {
return m_Count; }
2233 bool IsEmpty()
const {
return m_Count == 0; }
2235 ItemType* Front() {
return m_pFront; }
2236 const ItemType* Front()
const {
return m_pFront; }
2237 ItemType* Back() {
return m_pBack; }
2238 const ItemType* Back()
const {
return m_pBack; }
2240 ItemType* PushBack();
2241 ItemType* PushFront();
2242 ItemType* PushBack(
const T& value);
2243 ItemType* PushFront(
const T& value);
2248 ItemType* InsertBefore(ItemType* pItem);
2250 ItemType* InsertAfter(ItemType* pItem);
2252 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2253 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2255 void Remove(ItemType* pItem);
2258 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2259 VmaPoolAllocator<ItemType> m_ItemAllocator;
2265 VmaRawList(
const VmaRawList<T>& src);
2266 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2269 template<
typename T>
2270 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2271 m_pAllocationCallbacks(pAllocationCallbacks),
2272 m_ItemAllocator(pAllocationCallbacks, 128),
2279 template<
typename T>
2280 VmaRawList<T>::~VmaRawList()
2286 template<
typename T>
2287 void VmaRawList<T>::Clear()
2289 if(IsEmpty() ==
false)
2291 ItemType* pItem = m_pBack;
2292 while(pItem != VMA_NULL)
2294 ItemType*
const pPrevItem = pItem->pPrev;
2295 m_ItemAllocator.Free(pItem);
2298 m_pFront = VMA_NULL;
2304 template<
typename T>
2305 VmaListItem<T>* VmaRawList<T>::PushBack()
2307 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2308 pNewItem->pNext = VMA_NULL;
2311 pNewItem->pPrev = VMA_NULL;
2312 m_pFront = pNewItem;
2318 pNewItem->pPrev = m_pBack;
2319 m_pBack->pNext = pNewItem;
2326 template<
typename T>
2327 VmaListItem<T>* VmaRawList<T>::PushFront()
2329 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2330 pNewItem->pPrev = VMA_NULL;
2333 pNewItem->pNext = VMA_NULL;
2334 m_pFront = pNewItem;
2340 pNewItem->pNext = m_pFront;
2341 m_pFront->pPrev = pNewItem;
2342 m_pFront = pNewItem;
2348 template<
typename T>
2349 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2351 ItemType*
const pNewItem = PushBack();
2352 pNewItem->Value = value;
2356 template<
typename T>
2357 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2359 ItemType*
const pNewItem = PushFront();
2360 pNewItem->Value = value;
2364 template<
typename T>
2365 void VmaRawList<T>::PopBack()
2367 VMA_HEAVY_ASSERT(m_Count > 0);
2368 ItemType*
const pBackItem = m_pBack;
2369 ItemType*
const pPrevItem = pBackItem->pPrev;
2370 if(pPrevItem != VMA_NULL)
2372 pPrevItem->pNext = VMA_NULL;
2374 m_pBack = pPrevItem;
2375 m_ItemAllocator.Free(pBackItem);
2379 template<
typename T>
2380 void VmaRawList<T>::PopFront()
2382 VMA_HEAVY_ASSERT(m_Count > 0);
2383 ItemType*
const pFrontItem = m_pFront;
2384 ItemType*
const pNextItem = pFrontItem->pNext;
2385 if(pNextItem != VMA_NULL)
2387 pNextItem->pPrev = VMA_NULL;
2389 m_pFront = pNextItem;
2390 m_ItemAllocator.Free(pFrontItem);
2394 template<
typename T>
2395 void VmaRawList<T>::Remove(ItemType* pItem)
2397 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2398 VMA_HEAVY_ASSERT(m_Count > 0);
2400 if(pItem->pPrev != VMA_NULL)
2402 pItem->pPrev->pNext = pItem->pNext;
2406 VMA_HEAVY_ASSERT(m_pFront == pItem);
2407 m_pFront = pItem->pNext;
2410 if(pItem->pNext != VMA_NULL)
2412 pItem->pNext->pPrev = pItem->pPrev;
2416 VMA_HEAVY_ASSERT(m_pBack == pItem);
2417 m_pBack = pItem->pPrev;
2420 m_ItemAllocator.Free(pItem);
2424 template<
typename T>
2425 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2427 if(pItem != VMA_NULL)
2429 ItemType*
const prevItem = pItem->pPrev;
2430 ItemType*
const newItem = m_ItemAllocator.Alloc();
2431 newItem->pPrev = prevItem;
2432 newItem->pNext = pItem;
2433 pItem->pPrev = newItem;
2434 if(prevItem != VMA_NULL)
2436 prevItem->pNext = newItem;
2440 VMA_HEAVY_ASSERT(m_pFront == pItem);
2450 template<
typename T>
2451 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2453 if(pItem != VMA_NULL)
2455 ItemType*
const nextItem = pItem->pNext;
2456 ItemType*
const newItem = m_ItemAllocator.Alloc();
2457 newItem->pNext = nextItem;
2458 newItem->pPrev = pItem;
2459 pItem->pNext = newItem;
2460 if(nextItem != VMA_NULL)
2462 nextItem->pPrev = newItem;
2466 VMA_HEAVY_ASSERT(m_pBack == pItem);
2476 template<
typename T>
2477 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2479 ItemType*
const newItem = InsertBefore(pItem);
2480 newItem->Value = value;
2484 template<
typename T>
2485 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2487 ItemType*
const newItem = InsertAfter(pItem);
2488 newItem->Value = value;
2492 template<
typename T,
typename AllocatorT>
2505 T& operator*()
const 2507 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2508 return m_pItem->Value;
2510 T* operator->()
const 2512 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2513 return &m_pItem->Value;
2516 iterator& operator++()
2518 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2519 m_pItem = m_pItem->pNext;
2522 iterator& operator--()
2524 if(m_pItem != VMA_NULL)
2526 m_pItem = m_pItem->pPrev;
2530 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2531 m_pItem = m_pList->Back();
2536 iterator operator++(
int)
2538 iterator result = *
this;
2542 iterator operator--(
int)
2544 iterator result = *
this;
2549 bool operator==(
const iterator& rhs)
const 2551 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2552 return m_pItem == rhs.m_pItem;
2554 bool operator!=(
const iterator& rhs)
const 2556 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2557 return m_pItem != rhs.m_pItem;
2561 VmaRawList<T>* m_pList;
2562 VmaListItem<T>* m_pItem;
2564 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2570 friend class VmaList<T, AllocatorT>;
2573 class const_iterator
2582 const_iterator(
const iterator& src) :
2583 m_pList(src.m_pList),
2584 m_pItem(src.m_pItem)
2588 const T& operator*()
const 2590 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2591 return m_pItem->Value;
2593 const T* operator->()
const 2595 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2596 return &m_pItem->Value;
2599 const_iterator& operator++()
2601 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2602 m_pItem = m_pItem->pNext;
2605 const_iterator& operator--()
2607 if(m_pItem != VMA_NULL)
2609 m_pItem = m_pItem->pPrev;
2613 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2614 m_pItem = m_pList->Back();
2619 const_iterator operator++(
int)
2621 const_iterator result = *
this;
2625 const_iterator operator--(
int)
2627 const_iterator result = *
this;
2632 bool operator==(
const const_iterator& rhs)
const 2634 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2635 return m_pItem == rhs.m_pItem;
2637 bool operator!=(
const const_iterator& rhs)
const 2639 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2640 return m_pItem != rhs.m_pItem;
2644 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2650 const VmaRawList<T>* m_pList;
2651 const VmaListItem<T>* m_pItem;
2653 friend class VmaList<T, AllocatorT>;
2656 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2658 bool empty()
const {
return m_RawList.IsEmpty(); }
2659 size_t size()
const {
return m_RawList.GetCount(); }
2661 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2662 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2664 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2665 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2667 void clear() { m_RawList.Clear(); }
2668 void push_back(
const T& value) { m_RawList.PushBack(value); }
2669 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2670 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2673 VmaRawList<T> m_RawList;
2676 #endif // #if VMA_USE_STL_LIST 2684 #if VMA_USE_STL_UNORDERED_MAP 2686 #define VmaPair std::pair 2688 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2689 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2691 #else // #if VMA_USE_STL_UNORDERED_MAP 2693 template<
typename T1,
typename T2>
2699 VmaPair() : first(), second() { }
2700 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2706 template<
typename KeyT,
typename ValueT>
2710 typedef VmaPair<KeyT, ValueT> PairType;
2711 typedef PairType* iterator;
2713 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2715 iterator begin() {
return m_Vector.begin(); }
2716 iterator end() {
return m_Vector.end(); }
2718 void insert(
const PairType& pair);
2719 iterator find(
const KeyT& key);
2720 void erase(iterator it);
2723 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2726 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2728 template<
typename FirstT,
typename SecondT>
2729 struct VmaPairFirstLess
2731 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2733 return lhs.first < rhs.first;
2735 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2737 return lhs.first < rhsFirst;
2741 template<
typename KeyT,
typename ValueT>
2742 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2744 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2746 m_Vector.data() + m_Vector.size(),
2748 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2749 VmaVectorInsert(m_Vector, indexToInsert, pair);
2752 template<
typename KeyT,
typename ValueT>
2753 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2755 PairType* it = VmaBinaryFindFirstNotLess(
2757 m_Vector.data() + m_Vector.size(),
2759 VmaPairFirstLess<KeyT, ValueT>());
2760 if((it != m_Vector.end()) && (it->first == key))
2766 return m_Vector.end();
2770 template<
typename KeyT,
typename ValueT>
2771 void VmaMap<KeyT, ValueT>::erase(iterator it)
2773 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2776 #endif // #if VMA_USE_STL_UNORDERED_MAP 2782 class VmaDeviceMemoryBlock;
2784 enum VMA_BLOCK_VECTOR_TYPE
2786 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2787 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2788 VMA_BLOCK_VECTOR_TYPE_COUNT
2794 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2795 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2798 struct VmaAllocation_T
2801 enum ALLOCATION_TYPE
2803 ALLOCATION_TYPE_NONE,
2804 ALLOCATION_TYPE_BLOCK,
2805 ALLOCATION_TYPE_OWN,
2808 VmaAllocation_T(uint32_t currentFrameIndex) :
2811 m_pUserData(VMA_NULL),
2812 m_Type(ALLOCATION_TYPE_NONE),
2813 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2814 m_LastUseFrameIndex(currentFrameIndex)
2818 void InitBlockAllocation(
2820 VmaDeviceMemoryBlock* block,
2821 VkDeviceSize offset,
2822 VkDeviceSize alignment,
2824 VmaSuballocationType suballocationType,
2828 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2829 VMA_ASSERT(block != VMA_NULL);
2830 m_Type = ALLOCATION_TYPE_BLOCK;
2831 m_Alignment = alignment;
2833 m_pUserData = pUserData;
2834 m_SuballocationType = suballocationType;
2835 m_BlockAllocation.m_hPool = hPool;
2836 m_BlockAllocation.m_Block = block;
2837 m_BlockAllocation.m_Offset = offset;
2838 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2843 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2844 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2845 m_Type = ALLOCATION_TYPE_BLOCK;
2846 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2847 m_BlockAllocation.m_Block = VMA_NULL;
2848 m_BlockAllocation.m_Offset = 0;
2849 m_BlockAllocation.m_CanBecomeLost =
true;
2852 void ChangeBlockAllocation(
2853 VmaDeviceMemoryBlock* block,
2854 VkDeviceSize offset)
2856 VMA_ASSERT(block != VMA_NULL);
2857 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2858 m_BlockAllocation.m_Block = block;
2859 m_BlockAllocation.m_Offset = offset;
2862 void InitOwnAllocation(
2863 uint32_t memoryTypeIndex,
2864 VkDeviceMemory hMemory,
2865 VmaSuballocationType suballocationType,
2871 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2872 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2873 m_Type = ALLOCATION_TYPE_OWN;
2876 m_pUserData = pUserData;
2877 m_SuballocationType = suballocationType;
2878 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2879 m_OwnAllocation.m_hMemory = hMemory;
2880 m_OwnAllocation.m_PersistentMap = persistentMap;
2881 m_OwnAllocation.m_pMappedData = pMappedData;
2884 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2885 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2886 VkDeviceSize GetSize()
const {
return m_Size; }
2887 void* GetUserData()
const {
return m_pUserData; }
2888 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2889 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2891 VmaDeviceMemoryBlock* GetBlock()
const 2893 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2894 return m_BlockAllocation.m_Block;
2896 VkDeviceSize GetOffset()
const;
2897 VkDeviceMemory GetMemory()
const;
2898 uint32_t GetMemoryTypeIndex()
const;
2899 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2900 void* GetMappedData()
const;
2901 bool CanBecomeLost()
const;
2902 VmaPool GetPool()
const;
2904 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2905 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
2907 uint32_t GetLastUseFrameIndex()
const 2909 return m_LastUseFrameIndex.load();
2911 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2913 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
2923 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
2927 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2939 VkDeviceSize m_Alignment;
2940 VkDeviceSize m_Size;
2942 ALLOCATION_TYPE m_Type;
2943 VmaSuballocationType m_SuballocationType;
2944 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
2947 struct BlockAllocation
2950 VmaDeviceMemoryBlock* m_Block;
2951 VkDeviceSize m_Offset;
2952 bool m_CanBecomeLost;
2956 struct OwnAllocation
2958 uint32_t m_MemoryTypeIndex;
2959 VkDeviceMemory m_hMemory;
2960 bool m_PersistentMap;
2961 void* m_pMappedData;
2967 BlockAllocation m_BlockAllocation;
2969 OwnAllocation m_OwnAllocation;
2977 struct VmaSuballocation
2979 VkDeviceSize offset;
2981 VmaAllocation hAllocation;
2982 VmaSuballocationType type;
2985 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
2988 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3003 struct VmaAllocationRequest
3005 VkDeviceSize offset;
3006 VkDeviceSize sumFreeSize;
3007 VkDeviceSize sumItemSize;
3008 VmaSuballocationList::iterator item;
3009 size_t itemsToMakeLostCount;
3011 VkDeviceSize CalcCost()
const 3013 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3023 class VmaDeviceMemoryBlock
3026 uint32_t m_MemoryTypeIndex;
3027 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3028 VkDeviceMemory m_hMemory;
3029 VkDeviceSize m_Size;
3030 bool m_PersistentMap;
3031 void* m_pMappedData;
3032 uint32_t m_FreeCount;
3033 VkDeviceSize m_SumFreeSize;
3034 VmaSuballocationList m_Suballocations;
3037 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3039 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3041 ~VmaDeviceMemoryBlock()
3043 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3048 uint32_t newMemoryTypeIndex,
3049 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3050 VkDeviceMemory newMemory,
3051 VkDeviceSize newSize,
3055 void Destroy(VmaAllocator allocator);
3058 bool Validate()
const;
3060 VkDeviceSize GetUnusedRangeSizeMax()
const;
3065 bool CreateAllocationRequest(
3066 uint32_t currentFrameIndex,
3067 uint32_t frameInUseCount,
3068 VkDeviceSize bufferImageGranularity,
3069 VkDeviceSize allocSize,
3070 VkDeviceSize allocAlignment,
3071 VmaSuballocationType allocType,
3072 bool canMakeOtherLost,
3073 VmaAllocationRequest* pAllocationRequest);
3075 bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
3077 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3080 bool IsEmpty()
const;
3085 const VmaAllocationRequest& request,
3086 VmaSuballocationType type,
3087 VkDeviceSize allocSize,
3088 VmaAllocation hAllocation);
3091 void Free(
const VmaAllocation allocation);
3093 #if VMA_STATS_STRING_ENABLED 3094 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3100 bool CheckAllocation(
3101 uint32_t currentFrameIndex,
3102 uint32_t frameInUseCount,
3103 VkDeviceSize bufferImageGranularity,
3104 VkDeviceSize allocSize,
3105 VkDeviceSize allocAlignment,
3106 VmaSuballocationType allocType,
3107 VmaSuballocationList::const_iterator suballocItem,
3108 bool canMakeOtherLost,
3109 VkDeviceSize* pOffset,
3110 size_t* itemsToMakeLostCount,
3111 VkDeviceSize* pSumFreeSize,
3112 VkDeviceSize* pSumItemSize)
const;
3115 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3119 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3122 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3125 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3127 bool ValidateFreeSuballocationList()
const;
3130 struct VmaPointerLess
3132 bool operator()(
const void* lhs,
const void* rhs)
const 3138 class VmaDefragmentator;
3146 struct VmaBlockVector
3149 VmaAllocator hAllocator,
3150 uint32_t memoryTypeIndex,
3151 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3152 VkDeviceSize preferredBlockSize,
3153 size_t minBlockCount,
3154 size_t maxBlockCount,
3155 VkDeviceSize bufferImageGranularity,
3156 uint32_t frameInUseCount,
3160 VkResult CreateMinBlocks();
3162 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3163 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3164 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3165 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3166 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3170 bool IsEmpty()
const {
return m_Blocks.empty(); }
3173 VmaPool hCurrentPool,
3174 uint32_t currentFrameIndex,
3175 const VkMemoryRequirements& vkMemReq,
3177 VmaSuballocationType suballocType,
3178 VmaAllocation* pAllocation);
3181 VmaAllocation hAllocation);
3186 #if VMA_STATS_STRING_ENABLED 3187 void PrintDetailedMap(
class VmaJsonWriter& json);
3190 void UnmapPersistentlyMappedMemory();
3191 VkResult MapPersistentlyMappedMemory();
3193 void MakePoolAllocationsLost(
3194 uint32_t currentFrameIndex,
3195 size_t* pLostAllocationCount);
3197 VmaDefragmentator* EnsureDefragmentator(
3198 VmaAllocator hAllocator,
3199 uint32_t currentFrameIndex);
3201 VkResult Defragment(
3203 VkDeviceSize& maxBytesToMove,
3204 uint32_t& maxAllocationsToMove);
3206 void DestroyDefragmentator();
3209 friend class VmaDefragmentator;
3211 const VmaAllocator m_hAllocator;
3212 const uint32_t m_MemoryTypeIndex;
3213 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3214 const VkDeviceSize m_PreferredBlockSize;
3215 const size_t m_MinBlockCount;
3216 const size_t m_MaxBlockCount;
3217 const VkDeviceSize m_BufferImageGranularity;
3218 const uint32_t m_FrameInUseCount;
3219 const bool m_IsCustomPool;
3222 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3226 bool m_HasEmptyBlock;
3227 VmaDefragmentator* m_pDefragmentator;
3230 void Remove(VmaDeviceMemoryBlock* pBlock);
3234 void IncrementallySortBlocks();
3236 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3242 VmaBlockVector m_BlockVector;
3246 VmaAllocator hAllocator,
3250 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3252 #if VMA_STATS_STRING_ENABLED 3257 class VmaDefragmentator
3259 const VmaAllocator m_hAllocator;
3260 VmaBlockVector*
const m_pBlockVector;
3261 uint32_t m_CurrentFrameIndex;
3262 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3263 VkDeviceSize m_BytesMoved;
3264 uint32_t m_AllocationsMoved;
3266 struct AllocationInfo
3268 VmaAllocation m_hAllocation;
3269 VkBool32* m_pChanged;
3272 m_hAllocation(VK_NULL_HANDLE),
3273 m_pChanged(VMA_NULL)
3278 struct AllocationInfoSizeGreater
3280 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3282 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3287 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3291 VmaDeviceMemoryBlock* m_pBlock;
3292 bool m_HasNonMovableAllocations;
3293 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3295 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3297 m_HasNonMovableAllocations(true),
3298 m_Allocations(pAllocationCallbacks),
3299 m_pMappedDataForDefragmentation(VMA_NULL)
3303 void CalcHasNonMovableAllocations()
3305 const size_t blockAllocCount =
3306 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3307 const size_t defragmentAllocCount = m_Allocations.size();
3308 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3311 void SortAllocationsBySizeDescecnding()
3313 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3316 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3317 void Unmap(VmaAllocator hAllocator);
3321 void* m_pMappedDataForDefragmentation;
3324 struct BlockPointerLess
3326 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3328 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3330 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3332 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3338 struct BlockInfoCompareMoveDestination
3340 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3342 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3346 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3350 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3358 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3359 BlockInfoVector m_Blocks;
3361 VkResult DefragmentRound(
3362 VkDeviceSize maxBytesToMove,
3363 uint32_t maxAllocationsToMove);
3365 static bool MoveMakesSense(
3366 size_t dstBlockIndex, VkDeviceSize dstOffset,
3367 size_t srcBlockIndex, VkDeviceSize srcOffset);
3371 VmaAllocator hAllocator,
3372 VmaBlockVector* pBlockVector,
3373 uint32_t currentFrameIndex);
3375 ~VmaDefragmentator();
3377 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3378 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3380 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3382 VkResult Defragment(
3383 VkDeviceSize maxBytesToMove,
3384 uint32_t maxAllocationsToMove);
3388 struct VmaAllocator_T
3392 bool m_AllocationCallbacksSpecified;
3393 VkAllocationCallbacks m_AllocationCallbacks;
3397 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
3400 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3401 VMA_MUTEX m_HeapSizeLimitMutex;
3403 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3404 VkPhysicalDeviceMemoryProperties m_MemProps;
3407 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3410 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3411 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3412 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
3417 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3419 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3423 return m_VulkanFunctions;
3426 VkDeviceSize GetBufferImageGranularity()
const 3429 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3430 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3433 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3434 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3436 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3438 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3439 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3443 VkResult AllocateMemory(
3444 const VkMemoryRequirements& vkMemReq,
3446 VmaSuballocationType suballocType,
3447 VmaAllocation* pAllocation);
3450 void FreeMemory(
const VmaAllocation allocation);
3452 void CalculateStats(
VmaStats* pStats);
3454 #if VMA_STATS_STRING_ENABLED 3455 void PrintDetailedMap(
class VmaJsonWriter& json);
3458 void UnmapPersistentlyMappedMemory();
3459 VkResult MapPersistentlyMappedMemory();
3461 VkResult Defragment(
3462 VmaAllocation* pAllocations,
3463 size_t allocationCount,
3464 VkBool32* pAllocationsChanged,
3468 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3471 void DestroyPool(VmaPool pool);
3472 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3474 void SetCurrentFrameIndex(uint32_t frameIndex);
3476 void MakePoolAllocationsLost(
3478 size_t* pLostAllocationCount);
3480 void CreateLostAllocation(VmaAllocation* pAllocation);
3482 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3483 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3486 VkDeviceSize m_PreferredLargeHeapBlockSize;
3487 VkDeviceSize m_PreferredSmallHeapBlockSize;
3489 VkPhysicalDevice m_PhysicalDevice;
3490 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3492 VMA_MUTEX m_PoolsMutex;
3494 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3500 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3502 VkResult AllocateMemoryOfType(
3503 const VkMemoryRequirements& vkMemReq,
3505 uint32_t memTypeIndex,
3506 VmaSuballocationType suballocType,
3507 VmaAllocation* pAllocation);
3510 VkResult AllocateOwnMemory(
3512 VmaSuballocationType suballocType,
3513 uint32_t memTypeIndex,
3516 VmaAllocation* pAllocation);
3519 void FreeOwnMemory(VmaAllocation allocation);
3525 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3527 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3530 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3532 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3535 template<
typename T>
3536 static T* VmaAllocate(VmaAllocator hAllocator)
3538 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3541 template<
typename T>
3542 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3544 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3547 template<
typename T>
3548 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3553 VmaFree(hAllocator, ptr);
3557 template<
typename T>
3558 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3562 for(
size_t i = count; i--; )
3564 VmaFree(hAllocator, ptr);
3571 #if VMA_STATS_STRING_ENABLED 3573 class VmaStringBuilder
3576 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3577 size_t GetLength()
const {
return m_Data.size(); }
3578 const char* GetData()
const {
return m_Data.data(); }
3580 void Add(
char ch) { m_Data.push_back(ch); }
3581 void Add(
const char* pStr);
3582 void AddNewLine() { Add(
'\n'); }
3583 void AddNumber(uint32_t num);
3584 void AddNumber(uint64_t num);
3585 void AddPointer(
const void* ptr);
3588 VmaVector< char, VmaStlAllocator<char> > m_Data;
3591 void VmaStringBuilder::Add(
const char* pStr)
3593 const size_t strLen = strlen(pStr);
3596 const size_t oldCount = m_Data.size();
3597 m_Data.resize(oldCount + strLen);
3598 memcpy(m_Data.data() + oldCount, pStr, strLen);
3602 void VmaStringBuilder::AddNumber(uint32_t num)
3605 VmaUint32ToStr(buf,
sizeof(buf), num);
3609 void VmaStringBuilder::AddNumber(uint64_t num)
3612 VmaUint64ToStr(buf,
sizeof(buf), num);
3616 void VmaStringBuilder::AddPointer(
const void* ptr)
3619 VmaPtrToStr(buf,
sizeof(buf), ptr);
3623 #endif // #if VMA_STATS_STRING_ENABLED 3628 #if VMA_STATS_STRING_ENABLED 3633 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3636 void BeginObject(
bool singleLine =
false);
3639 void BeginArray(
bool singleLine =
false);
3642 void WriteString(
const char* pStr);
3643 void BeginString(
const char* pStr = VMA_NULL);
3644 void ContinueString(
const char* pStr);
3645 void ContinueString(uint32_t n);
3646 void ContinueString(uint64_t n);
3647 void EndString(
const char* pStr = VMA_NULL);
3649 void WriteNumber(uint32_t n);
3650 void WriteNumber(uint64_t n);
3651 void WriteBool(
bool b);
3655 static const char*
const INDENT;
3657 enum COLLECTION_TYPE
3659 COLLECTION_TYPE_OBJECT,
3660 COLLECTION_TYPE_ARRAY,
3664 COLLECTION_TYPE type;
3665 uint32_t valueCount;
3666 bool singleLineMode;
3669 VmaStringBuilder& m_SB;
3670 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3671 bool m_InsideString;
3673 void BeginValue(
bool isString);
3674 void WriteIndent(
bool oneLess =
false);
3677 const char*
const VmaJsonWriter::INDENT =
" ";
3679 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3681 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3682 m_InsideString(false)
3686 VmaJsonWriter::~VmaJsonWriter()
3688 VMA_ASSERT(!m_InsideString);
3689 VMA_ASSERT(m_Stack.empty());
3692 void VmaJsonWriter::BeginObject(
bool singleLine)
3694 VMA_ASSERT(!m_InsideString);
3700 item.type = COLLECTION_TYPE_OBJECT;
3701 item.valueCount = 0;
3702 item.singleLineMode = singleLine;
3703 m_Stack.push_back(item);
3706 void VmaJsonWriter::EndObject()
3708 VMA_ASSERT(!m_InsideString);
3713 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3717 void VmaJsonWriter::BeginArray(
bool singleLine)
3719 VMA_ASSERT(!m_InsideString);
3725 item.type = COLLECTION_TYPE_ARRAY;
3726 item.valueCount = 0;
3727 item.singleLineMode = singleLine;
3728 m_Stack.push_back(item);
3731 void VmaJsonWriter::EndArray()
3733 VMA_ASSERT(!m_InsideString);
3738 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
3742 void VmaJsonWriter::WriteString(
const char* pStr)
3748 void VmaJsonWriter::BeginString(
const char* pStr)
3750 VMA_ASSERT(!m_InsideString);
3754 m_InsideString =
true;
3755 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3757 ContinueString(pStr);
3761 void VmaJsonWriter::ContinueString(
const char* pStr)
3763 VMA_ASSERT(m_InsideString);
3765 const size_t strLen = strlen(pStr);
3766 for(
size_t i = 0; i < strLen; ++i)
3793 VMA_ASSERT(0 &&
"Character not currently supported.");
3799 void VmaJsonWriter::ContinueString(uint32_t n)
3801 VMA_ASSERT(m_InsideString);
3805 void VmaJsonWriter::ContinueString(uint64_t n)
3807 VMA_ASSERT(m_InsideString);
3811 void VmaJsonWriter::EndString(
const char* pStr)
3813 VMA_ASSERT(m_InsideString);
3814 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3816 ContinueString(pStr);
3819 m_InsideString =
false;
3822 void VmaJsonWriter::WriteNumber(uint32_t n)
3824 VMA_ASSERT(!m_InsideString);
3829 void VmaJsonWriter::WriteNumber(uint64_t n)
3831 VMA_ASSERT(!m_InsideString);
3836 void VmaJsonWriter::WriteBool(
bool b)
3838 VMA_ASSERT(!m_InsideString);
3840 m_SB.Add(b ?
"true" :
"false");
3843 void VmaJsonWriter::WriteNull()
3845 VMA_ASSERT(!m_InsideString);
3850 void VmaJsonWriter::BeginValue(
bool isString)
3852 if(!m_Stack.empty())
3854 StackItem& currItem = m_Stack.back();
3855 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3856 currItem.valueCount % 2 == 0)
3858 VMA_ASSERT(isString);
3861 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3862 currItem.valueCount % 2 != 0)
3866 else if(currItem.valueCount > 0)
3875 ++currItem.valueCount;
3879 void VmaJsonWriter::WriteIndent(
bool oneLess)
3881 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3885 size_t count = m_Stack.size();
3886 if(count > 0 && oneLess)
3890 for(
size_t i = 0; i < count; ++i)
3897 #endif // #if VMA_STATS_STRING_ENABLED 3901 VkDeviceSize VmaAllocation_T::GetOffset()
const 3905 case ALLOCATION_TYPE_BLOCK:
3906 return m_BlockAllocation.m_Offset;
3907 case ALLOCATION_TYPE_OWN:
3915 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3919 case ALLOCATION_TYPE_BLOCK:
3920 return m_BlockAllocation.m_Block->m_hMemory;
3921 case ALLOCATION_TYPE_OWN:
3922 return m_OwnAllocation.m_hMemory;
3925 return VK_NULL_HANDLE;
3929 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3933 case ALLOCATION_TYPE_BLOCK:
3934 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3935 case ALLOCATION_TYPE_OWN:
3936 return m_OwnAllocation.m_MemoryTypeIndex;
3943 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3947 case ALLOCATION_TYPE_BLOCK:
3948 return m_BlockAllocation.m_Block->m_BlockVectorType;
3949 case ALLOCATION_TYPE_OWN:
3950 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3953 return VMA_BLOCK_VECTOR_TYPE_COUNT;
3957 void* VmaAllocation_T::GetMappedData()
const 3961 case ALLOCATION_TYPE_BLOCK:
3962 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3964 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
3971 case ALLOCATION_TYPE_OWN:
3972 return m_OwnAllocation.m_pMappedData;
3979 bool VmaAllocation_T::CanBecomeLost()
const 3983 case ALLOCATION_TYPE_BLOCK:
3984 return m_BlockAllocation.m_CanBecomeLost;
3985 case ALLOCATION_TYPE_OWN:
3993 VmaPool VmaAllocation_T::GetPool()
const 3995 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3996 return m_BlockAllocation.m_hPool;
3999 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4001 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4002 if(m_OwnAllocation.m_PersistentMap)
4004 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4005 hAllocator->m_hDevice,
4006 m_OwnAllocation.m_hMemory,
4010 &m_OwnAllocation.m_pMappedData);
4014 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4016 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4017 if(m_OwnAllocation.m_pMappedData)
4019 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4020 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4021 m_OwnAllocation.m_pMappedData = VMA_NULL;
4026 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4028 VMA_ASSERT(CanBecomeLost());
4034 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4037 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4042 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4048 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4058 #if VMA_STATS_STRING_ENABLED 4061 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4070 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4074 json.WriteString(
"Blocks");
4077 json.WriteString(
"Allocations");
4080 json.WriteString(
"UnusedRanges");
4083 json.WriteString(
"UsedBytes");
4086 json.WriteString(
"UnusedBytes");
4091 json.WriteString(
"AllocationSize");
4092 json.BeginObject(
true);
4093 json.WriteString(
"Min");
4095 json.WriteString(
"Avg");
4097 json.WriteString(
"Max");
4104 json.WriteString(
"UnusedRangeSize");
4105 json.BeginObject(
true);
4106 json.WriteString(
"Min");
4108 json.WriteString(
"Avg");
4110 json.WriteString(
"Max");
4118 #endif // #if VMA_STATS_STRING_ENABLED 4120 struct VmaSuballocationItemSizeLess
4123 const VmaSuballocationList::iterator lhs,
4124 const VmaSuballocationList::iterator rhs)
const 4126 return lhs->size < rhs->size;
4129 const VmaSuballocationList::iterator lhs,
4130 VkDeviceSize rhsSize)
const 4132 return lhs->size < rhsSize;
4136 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
4137 m_MemoryTypeIndex(UINT32_MAX),
4138 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
4139 m_hMemory(VK_NULL_HANDLE),
4141 m_PersistentMap(false),
4142 m_pMappedData(VMA_NULL),
4145 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4146 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4150 void VmaDeviceMemoryBlock::Init(
4151 uint32_t newMemoryTypeIndex,
4152 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
4153 VkDeviceMemory newMemory,
4154 VkDeviceSize newSize,
4158 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4160 m_MemoryTypeIndex = newMemoryTypeIndex;
4161 m_BlockVectorType = newBlockVectorType;
4162 m_hMemory = newMemory;
4164 m_PersistentMap = persistentMap;
4165 m_pMappedData = pMappedData;
4167 m_SumFreeSize = newSize;
4169 m_Suballocations.clear();
4170 m_FreeSuballocationsBySize.clear();
4172 VmaSuballocation suballoc = {};
4173 suballoc.offset = 0;
4174 suballoc.size = newSize;
4175 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4176 suballoc.hAllocation = VK_NULL_HANDLE;
4178 m_Suballocations.push_back(suballoc);
4179 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4181 m_FreeSuballocationsBySize.push_back(suballocItem);
4184 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
4188 VMA_ASSERT(IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
4190 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
4191 if(m_pMappedData != VMA_NULL)
4193 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
4194 m_pMappedData = VMA_NULL;
4197 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
4198 m_hMemory = VK_NULL_HANDLE;
4201 bool VmaDeviceMemoryBlock::Validate()
const 4203 if((m_hMemory == VK_NULL_HANDLE) ||
4205 m_Suballocations.empty())
4211 VkDeviceSize calculatedOffset = 0;
4213 uint32_t calculatedFreeCount = 0;
4215 VkDeviceSize calculatedSumFreeSize = 0;
4218 size_t freeSuballocationsToRegister = 0;
4220 bool prevFree =
false;
4222 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4223 suballocItem != m_Suballocations.cend();
4226 const VmaSuballocation& subAlloc = *suballocItem;
4229 if(subAlloc.offset != calculatedOffset)
4234 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4236 if(prevFree && currFree)
4240 prevFree = currFree;
4242 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4249 calculatedSumFreeSize += subAlloc.size;
4250 ++calculatedFreeCount;
4251 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4253 ++freeSuballocationsToRegister;
4257 calculatedOffset += subAlloc.size;
4262 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4267 VkDeviceSize lastSize = 0;
4268 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4270 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4273 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4278 if(suballocItem->size < lastSize)
4283 lastSize = suballocItem->size;
4288 (calculatedOffset == m_Size) &&
4289 (calculatedSumFreeSize == m_SumFreeSize) &&
4290 (calculatedFreeCount == m_FreeCount);
4293 VkDeviceSize VmaDeviceMemoryBlock::GetUnusedRangeSizeMax()
const 4295 if(!m_FreeSuballocationsBySize.empty())
4297 return m_FreeSuballocationsBySize.back()->size;
4315 bool VmaDeviceMemoryBlock::CreateAllocationRequest(
4316 uint32_t currentFrameIndex,
4317 uint32_t frameInUseCount,
4318 VkDeviceSize bufferImageGranularity,
4319 VkDeviceSize allocSize,
4320 VkDeviceSize allocAlignment,
4321 VmaSuballocationType allocType,
4322 bool canMakeOtherLost,
4323 VmaAllocationRequest* pAllocationRequest)
4325 VMA_ASSERT(allocSize > 0);
4326 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4327 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4328 VMA_HEAVY_ASSERT(Validate());
4331 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4337 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4338 if(freeSuballocCount > 0)
4343 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4344 m_FreeSuballocationsBySize.data(),
4345 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4347 VmaSuballocationItemSizeLess());
4348 size_t index = it - m_FreeSuballocationsBySize.data();
4349 for(; index < freeSuballocCount; ++index)
4354 bufferImageGranularity,
4358 m_FreeSuballocationsBySize[index],
4360 &pAllocationRequest->offset,
4361 &pAllocationRequest->itemsToMakeLostCount,
4362 &pAllocationRequest->sumFreeSize,
4363 &pAllocationRequest->sumItemSize))
4365 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4373 for(
size_t index = freeSuballocCount; index--; )
4378 bufferImageGranularity,
4382 m_FreeSuballocationsBySize[index],
4384 &pAllocationRequest->offset,
4385 &pAllocationRequest->itemsToMakeLostCount,
4386 &pAllocationRequest->sumFreeSize,
4387 &pAllocationRequest->sumItemSize))
4389 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4396 if(canMakeOtherLost)
4400 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4401 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4403 VmaAllocationRequest tmpAllocRequest = {};
4404 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4405 suballocIt != m_Suballocations.end();
4408 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4409 suballocIt->hAllocation->CanBecomeLost())
4414 bufferImageGranularity,
4420 &tmpAllocRequest.offset,
4421 &tmpAllocRequest.itemsToMakeLostCount,
4422 &tmpAllocRequest.sumFreeSize,
4423 &tmpAllocRequest.sumItemSize))
4425 tmpAllocRequest.item = suballocIt;
4427 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4429 *pAllocationRequest = tmpAllocRequest;
4435 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
4444 bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
4446 while(pAllocationRequest->itemsToMakeLostCount > 0)
4448 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4450 ++pAllocationRequest->item;
4452 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4453 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4454 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4455 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4457 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4458 --pAllocationRequest->itemsToMakeLostCount;
4466 VMA_HEAVY_ASSERT(Validate());
4467 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4468 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
4473 uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4475 uint32_t lostAllocationCount = 0;
4476 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4477 it != m_Suballocations.end();
4480 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4481 it->hAllocation->CanBecomeLost() &&
4482 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4484 it = FreeSuballocation(it);
4485 ++lostAllocationCount;
4488 return lostAllocationCount;
// Checks whether an allocation of (allocSize, allocAlignment, allocType) can
// be placed starting at suballocItem. Two modes:
//  - canMakeOtherLost == true: the candidate region may span occupied
//    suballocations that can become lost; *itemsToMakeLostCount and
//    *pSumItemSize report the cost of doing so.
//  - canMakeOtherLost == false: suballocItem must be a single FREE
//    suballocation large enough for the request.
// On success fills *pOffset with the aligned start offset. pSumFreeSize /
// pSumItemSize accumulate free vs. to-be-lost bytes for cost comparison.
// NOTE(review): extraction dropped many interior lines (braces, early
// `return false` statements after failed checks) — exact branch structure
// must be confirmed against the full source.
4491 bool VmaDeviceMemoryBlock::CheckAllocation(
4492 uint32_t currentFrameIndex,
4493 uint32_t frameInUseCount,
4494 VkDeviceSize bufferImageGranularity,
4495 VkDeviceSize allocSize,
4496 VkDeviceSize allocAlignment,
4497 VmaSuballocationType allocType,
4498 VmaSuballocationList::const_iterator suballocItem,
4499 bool canMakeOtherLost,
4500 VkDeviceSize* pOffset,
4501 size_t* itemsToMakeLostCount,
4502 VkDeviceSize* pSumFreeSize,
4503 VkDeviceSize* pSumItemSize)
const 4505 VMA_ASSERT(allocSize > 0);
4506 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4507 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4508 VMA_ASSERT(pOffset != VMA_NULL);
4510 *itemsToMakeLostCount = 0;
// ---- Mode 1: allowed to make other allocations lost. ----
4514 if(canMakeOtherLost)
4516 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4518 *pSumFreeSize = suballocItem->size;
// Occupied item: usable only if its allocation can become lost and is old
// enough relative to the frame-in-use window.
4522 if(suballocItem->hAllocation->CanBecomeLost() &&
4523 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4525 ++*itemsToMakeLostCount;
4526 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room left in the block from this offset.
4535 if(m_Size - suballocItem->offset < allocSize)
// Compute candidate start: item offset + debug margin, then alignment.
4541 *pOffset = suballocItem->offset;
4544 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4546 *pOffset += VMA_DEBUG_MARGIN;
4550 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4551 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity against the previous suballocations: if a
// conflicting resource type shares the same "page", bump alignment up.
4555 if(bufferImageGranularity > 1)
4557 bool bufferImageGranularityConflict =
false;
4558 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4559 while(prevSuballocItem != m_Suballocations.cbegin())
4562 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4563 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4565 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4567 bufferImageGranularityConflict =
true;
4575 if(bufferImageGranularityConflict)
4577 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed the start past this suballocation entirely — reject.
4583 if(*pOffset >= suballocItem->offset + suballocItem->size)
4589 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4592 VmaSuballocationList::const_iterator next = suballocItem;
4594 const VkDeviceSize requiredEndMargin =
4595 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4597 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4599 if(suballocItem->offset + totalSize > m_Size)
// The request may need to consume several consecutive suballocations; each
// occupied one must be able to become lost (counted into the cost).
4606 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4607 if(totalSize > suballocItem->size)
4609 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4610 while(remainingSize > 0)
4613 if(lastSuballocItem == m_Suballocations.cend())
4617 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4619 *pSumFreeSize += lastSuballocItem->size;
4623 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4624 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4625 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4627 ++*itemsToMakeLostCount;
4628 *pSumItemSize += lastSuballocItem->size;
4635 remainingSize = (lastSuballocItem->size < remainingSize) ?
4636 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: they may also need to
// be made lost if they'd share a page with a conflicting type.
4642 if(bufferImageGranularity > 1)
4644 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4646 while(nextSuballocItem != m_Suballocations.cend())
4648 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4649 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4651 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4653 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4654 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4655 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4657 ++*itemsToMakeLostCount;
// ---- Mode 2: only a single FREE suballocation may be used. ----
4676 const VmaSuballocation& suballoc = *suballocItem;
4677 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4679 *pSumFreeSize = suballoc.size;
4682 if(suballoc.size < allocSize)
4688 *pOffset = suballoc.offset;
4691 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4693 *pOffset += VMA_DEBUG_MARGIN;
4697 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4698 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same previous-neighbor granularity adjustment as in mode 1.
4702 if(bufferImageGranularity > 1)
4704 bool bufferImageGranularityConflict =
false;
4705 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4706 while(prevSuballocItem != m_Suballocations.cbegin())
4709 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4710 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4712 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4714 bufferImageGranularityConflict =
true;
4722 if(bufferImageGranularityConflict)
4724 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4729 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4732 VmaSuballocationList::const_iterator next = suballocItem;
4734 const VkDeviceSize requiredEndMargin =
4735 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Must fit entirely inside this one FREE suballocation.
4738 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Next-neighbor granularity check: any conflict on the same page rejects the
// placement outright in this mode (nothing can be made lost).
4745 if(bufferImageGranularity > 1)
4747 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4749 while(nextSuballocItem != m_Suballocations.cend())
4751 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4752 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4754 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// True when the block contains exactly one suballocation and it is free,
// i.e. the whole block is a single FREE range.
4773 bool VmaDeviceMemoryBlock::IsEmpty()
const 4775 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Commits a previously validated VmaAllocationRequest: converts the FREE
// suballocation at request.item into an occupied one of allocSize at
// request.offset, inserting new FREE suballocations for any leading/trailing
// padding, and updates m_FreeCount / m_SumFreeSize bookkeeping.
4778 void VmaDeviceMemoryBlock::Alloc(
4779 const VmaAllocationRequest& request,
4780 VmaSuballocationType type,
4781 VkDeviceSize allocSize,
4782 VmaAllocation hAllocation)
4784 VMA_ASSERT(request.item != m_Suballocations.end());
4785 VmaSuballocation& suballoc = *request.item;
4787 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4789 VMA_ASSERT(request.offset >= suballoc.offset);
4790 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4791 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4792 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item stops being FREE: remove it from the by-size free list first.
4796 UnregisterFreeSuballocation(request.item);
4798 suballoc.offset = request.offset;
4799 suballoc.size = allocSize;
4800 suballoc.type = type;
4801 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE suballocation after the item.
4806 VmaSuballocation paddingSuballoc = {};
4807 paddingSuballoc.offset = request.offset + allocSize;
4808 paddingSuballoc.size = paddingEnd;
4809 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4810 VmaSuballocationList::iterator next = request.item;
4812 const VmaSuballocationList::iterator paddingEndItem =
4813 m_Suballocations.insert(next, paddingSuballoc);
4814 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE suballocation before the item.
4820 VmaSuballocation paddingSuballoc = {};
4821 paddingSuballoc.offset = request.offset - paddingBegin;
4822 paddingSuballoc.size = paddingBegin;
4823 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4824 const VmaSuballocationList::iterator paddingBeginItem =
4825 m_Suballocations.insert(request.item, paddingSuballoc);
4826 RegisterFreeSuballocation(paddingBeginItem);
// One FREE item consumed; padding items (re)added above adjust the count.
4830 m_FreeCount = m_FreeCount - 1;
4831 if(paddingBegin > 0)
4839 m_SumFreeSize -= allocSize;
// Turns the given suballocation into FREE and merges it with adjacent FREE
// neighbors (next first, then previous). Returns the iterator of the
// resulting FREE suballocation. Keeps m_SumFreeSize and the registered
// free-by-size list consistent.
4842 VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
4845 VmaSuballocation& suballoc = *suballocItem;
4846 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4847 suballoc.hAllocation = VK_NULL_HANDLE;
4851 m_SumFreeSize += suballoc.size;
// Decide which neighbors are FREE and therefore mergeable.
4854 bool mergeWithNext =
false;
4855 bool mergeWithPrev =
false;
4857 VmaSuballocationList::iterator nextItem = suballocItem;
4859 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
4861 mergeWithNext =
true;
4864 VmaSuballocationList::iterator prevItem = suballocItem;
4865 if(suballocItem != m_Suballocations.begin())
4868 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4870 mergeWithPrev =
true;
// Neighbors being merged must first leave the by-size registry, since their
// size (or existence) changes.
4876 UnregisterFreeSuballocation(nextItem);
4877 MergeFreeWithNext(suballocItem);
4882 UnregisterFreeSuballocation(prevItem);
4883 MergeFreeWithNext(prevItem);
4884 RegisterFreeSuballocation(prevItem);
4889 RegisterFreeSuballocation(suballocItem);
4890 return suballocItem;
// Frees the suballocation holding the given allocation handle. Linear search
// over the suballocation list; asserts if the allocation is not found.
4894 void VmaDeviceMemoryBlock::Free(
const VmaAllocation allocation)
4896 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4897 suballocItem != m_Suballocations.end();
4900 VmaSuballocation& suballoc = *suballocItem;
4901 if(suballoc.hAllocation == allocation)
4903 FreeSuballocation(suballocItem);
4904 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the handle did not belong to this block.
4908 VMA_ASSERT(0 &&
"Not found!");
// Serializes this block's statistics and full suballocation list as JSON:
// totals, unused bytes/ranges, then one object per suballocation with its
// type, size and offset. Only compiled when VMA_STATS_STRING_ENABLED.
4911 #if VMA_STATS_STRING_ENABLED 4913 void VmaDeviceMemoryBlock::PrintDetailedMap(
class VmaJsonWriter& json)
const 4917 json.WriteString(
"TotalBytes");
4918 json.WriteNumber(m_Size);
4920 json.WriteString(
"UnusedBytes");
4921 json.WriteNumber(m_SumFreeSize);
4923 json.WriteString(
"Allocations");
// Occupied count = total suballocations minus FREE ones.
4924 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4926 json.WriteString(
"UnusedRanges");
4927 json.WriteNumber(m_FreeCount);
4929 json.WriteString(
"Suballocations");
4932 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4933 suballocItem != m_Suballocations.cend();
4934 ++suballocItem, ++i)
4936 json.BeginObject(
true);
4938 json.WriteString(
"Type");
4939 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4941 json.WriteString(
"Size");
4942 json.WriteNumber(suballocItem->size);
4944 json.WriteString(
"Offset");
4945 json.WriteNumber(suballocItem->offset);
// Merges the FREE suballocation at `item` with the FREE suballocation
// immediately after it: sizes are summed into `item` and the next element is
// erased. Both must be FREE; caller handles free-list (un)registration.
4954 #endif // #if VMA_STATS_STRING_ENABLED 4956 void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
4958 VMA_ASSERT(item != m_Suballocations.end());
4959 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4961 VmaSuballocationList::iterator nextItem = item;
4963 VMA_ASSERT(nextItem != m_Suballocations.end());
4964 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
4966 item->size += nextItem->size;
4968 m_Suballocations.erase(nextItem);
// Adds a FREE suballocation to m_FreeSuballocationsBySize (kept sorted by
// size), but only if it is at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER —
// smaller holes are not worth tracking for best-fit search.
4971 void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
4973 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4974 VMA_ASSERT(item->size > 0);
4978 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4980 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4982 if(m_FreeSuballocationsBySize.empty())
4984 m_FreeSuballocationsBySize.push_back(item);
// Sorted insert keeps the vector ordered for binary search.
4988 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-search
// to the first element of equal size, then scan the run of equal-size entries
// for the exact iterator. Asserts if the item should be registered but isn't.
4996 void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
4998 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4999 VMA_ASSERT(item->size > 0);
5003 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the registration threshold were never added — nothing to do.
5005 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5007 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5008 m_FreeSuballocationsBySize.data(),
5009 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5011 VmaSuballocationItemSizeLess());
5012 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5013 index < m_FreeSuballocationsBySize.size();
5016 if(m_FreeSuballocationsBySize[index] == item)
5018 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Leaving the equal-size run without a match means the item is missing.
5021 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5023 VMA_ASSERT(0 &&
"Not found.");
// Debug validation of m_FreeSuballocationsBySize: every entry must be FREE,
// at least the registration threshold in size, and the vector must be sorted
// ascending by size. (Failure paths presumably return false — extraction
// dropped those lines.)
5029 bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList()
const 5031 VkDeviceSize lastSize = 0;
5032 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5034 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5036 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5041 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5046 if(it->size < lastSize)
5052 lastSize = it->size;
5059 memset(&outInfo, 0,
sizeof(outInfo));
// Computes a VmaStatInfo summary for one memory block by iterating its
// suballocations and classifying them as used vs. free.
// NOTE(review): the accumulation body (original lines ~5086-5111) was dropped
// by extraction — only the loop header and classification test are visible.
5064 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaDeviceMemoryBlock& block)
5068 const uint32_t rangeCount = (uint32_t)block.m_Suballocations.size();
5080 for(VmaSuballocationList::const_iterator suballocItem = block.m_Suballocations.cbegin();
5081 suballocItem != block.m_Suballocations.cend();
5084 const VmaSuballocation& suballoc = *suballocItem;
5085 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Finalizes a VmaStatInfo after accumulation (e.g. derives averages).
// NOTE(review): body dropped by extraction — signature only.
5112 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool constructor: forwards the VmaPoolCreateInfo fields to the embedded
// VmaBlockVector (memory type, mapped vs. unmapped vector type, block size
// and min/max counts, frame-in-use count). Partial view — some initializer
// arguments were dropped by extraction.
5120 VmaPool_T::VmaPool_T(
5121 VmaAllocator hAllocator,
5125 createInfo.memoryTypeIndex,
// Pool persistent-map flag selects which block-vector type backs the pool.
5127 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5128 createInfo.blockSize,
5129 createInfo.minBlockCount,
5130 createInfo.maxBlockCount,
5132 createInfo.frameInUseCount,
5137 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the configuration (memory type, block
// vector type, preferred block size, min/max block counts, granularity,
// frame-in-use count, custom-pool flag) and initializes the empty block list
// with the allocator's allocation callbacks.
5141 #if VMA_STATS_STRING_ENABLED 5143 #endif // #if VMA_STATS_STRING_ENABLED 5145 VmaBlockVector::VmaBlockVector(
5146 VmaAllocator hAllocator,
5147 uint32_t memoryTypeIndex,
5148 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5149 VkDeviceSize preferredBlockSize,
5150 size_t minBlockCount,
5151 size_t maxBlockCount,
5152 VkDeviceSize bufferImageGranularity,
5153 uint32_t frameInUseCount,
5154 bool isCustomPool) :
5155 m_hAllocator(hAllocator),
5156 m_MemoryTypeIndex(memoryTypeIndex),
5157 m_BlockVectorType(blockVectorType),
5158 m_PreferredBlockSize(preferredBlockSize),
5159 m_MinBlockCount(minBlockCount),
5160 m_MaxBlockCount(maxBlockCount),
5161 m_BufferImageGranularity(bufferImageGranularity),
5162 m_FrameInUseCount(frameInUseCount),
5163 m_IsCustomPool(isCustomPool),
5164 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5165 m_HasEmptyBlock(false),
5166 m_pDefragmentator(VMA_NULL)
// Destructor: a defragmentator must have been destroyed first; then each
// owned block is destroyed (freeing its VkDeviceMemory) and deleted.
5170 VmaBlockVector::~VmaBlockVector()
5172 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
// Iterate backwards so removal order doesn't matter.
5174 for(
size_t i = m_Blocks.size(); i--; )
5176 m_Blocks[i]->Destroy(m_hAllocator);
5177 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping (and
// returning the error) on the first failure.
5181 VkResult VmaBlockVector::CreateMinBlocks()
5183 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5185 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5186 if(res != VK_SUCCESS)
// Accumulates pool-level statistics (sizes, range counts) over all blocks,
// under the vector mutex. Partial view — per-field accumulation lines were
// dropped by extraction.
5194 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5202 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5204 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5206 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5208 VMA_HEAVY_ASSERT(pBlock->Validate());
5210 const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
5212 pStats->
size += pBlock->m_Size;
5220 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector. Strategy, in order:
//  1. Try every existing block without making other allocations lost.
//  2. If allowed, create a new block (shrinking the size on failure when not
//     a custom pool) and allocate from it.
//  3. If the caller permits making allocations lost, repeatedly pick the
//     cheapest candidate block/request, make its allocations lost, and
//     allocate — up to VMA_ALLOCATION_TRY_COUNT attempts.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits.
// NOTE(review): extraction dropped many lines (flag checks, some arguments,
// early returns) — confirm details against the full source.
5222 VkResult VmaBlockVector::Allocate(
5223 VmaPool hCurrentPool,
5224 uint32_t currentFrameIndex,
5225 const VkMemoryRequirements& vkMemReq,
5227 VmaSuballocationType suballocType,
5228 VmaAllocation* pAllocation)
// Persistent-map request must match this vector's mapped/unmapped type.
5232 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5234 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5235 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5238 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Pass 1: existing blocks, no allocations made lost. ----
5242 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5244 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5245 VMA_ASSERT(pCurrBlock);
5246 VmaAllocationRequest currRequest = {};
5247 if(pCurrBlock->CreateAllocationRequest(
5250 m_BufferImageGranularity,
5258 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// The block is about to stop being empty.
5261 if(pCurrBlock->IsEmpty())
5263 m_HasEmptyBlock =
false;
5266 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5267 pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5268 (*pAllocation)->InitBlockAllocation(
5277 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5278 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// ---- Pass 2: create a new block if the count limit allows. ----
5283 const bool canCreateNewBlock =
5285 (m_Blocks.size() < m_MaxBlockCount);
5288 if(canCreateNewBlock)
5291 VkDeviceSize blockSize = m_PreferredBlockSize;
5292 size_t newBlockIndex = 0;
5293 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default pools retry with progressively smaller blocks on OOM.
5296 if(res < 0 && m_IsCustomPool ==
false)
5300 if(blockSize >= vkMemReq.size)
5302 res = CreateBlock(blockSize, &newBlockIndex);
5307 if(blockSize >= vkMemReq.size)
5309 res = CreateBlock(blockSize, &newBlockIndex);
5314 if(res == VK_SUCCESS)
5316 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5317 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
// A fresh block is one big FREE suballocation; allocate at offset 0.
5320 VmaAllocationRequest allocRequest = {};
5321 allocRequest.item = pBlock->m_Suballocations.begin();
5322 allocRequest.offset = 0;
5323 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5324 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5325 (*pAllocation)->InitBlockAllocation(
5328 allocRequest.offset,
5334 VMA_HEAVY_ASSERT(pBlock->Validate());
5335 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// ---- Pass 3: make other allocations lost, cheapest candidate first. ----
5344 if(canMakeOtherLost)
5346 uint32_t tryIndex = 0;
5347 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5349 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5350 VmaAllocationRequest bestRequest = {};
5351 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5355 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5357 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5358 VMA_ASSERT(pCurrBlock);
5359 VmaAllocationRequest currRequest = {};
5360 if(pCurrBlock->CreateAllocationRequest(
5363 m_BufferImageGranularity,
// Cost = bytes of live allocations that would be sacrificed.
5370 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5371 if(pBestRequestBlock == VMA_NULL ||
5372 currRequestCost < bestRequestCost)
5374 pBestRequestBlock = pCurrBlock;
5375 bestRequest = currRequest;
5376 bestRequestCost = currRequestCost;
// Zero cost cannot be beaten — stop searching.
5378 if(bestRequestCost == 0)
5386 if(pBestRequestBlock != VMA_NULL)
5388 if(pBestRequestBlock->MakeRequestedAllocationsLost(
5394 if(pBestRequestBlock->IsEmpty())
5396 m_HasEmptyBlock =
false;
5399 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5400 pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5401 (*pAllocation)->InitBlockAllocation(
// NOTE(review): 'pBlock' below is from an earlier scope — this likely should
// validate pBestRequestBlock; verify against upstream VMA history.
5410 VMA_HEAVY_ASSERT(pBlock->Validate());
5411 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Exhausted the retry budget: too many objects kept becoming un-lost-able.
5425 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5427 return VK_ERROR_TOO_MANY_OBJECTS;
5431 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back to its block. Under the mutex: free the
// suballocation; if the block became entirely empty, either schedule it for
// deletion (when another empty block already exists and we are above the
// minimum block count) or remember it as the single cached empty block.
// The actual VkDeviceMemory release happens outside the lock.
5434 void VmaBlockVector::Free(
5435 VmaAllocation hAllocation)
5437 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: everything except the final Destroy/delete.
5441 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5443 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5445 pBlock->Free(hAllocation);
5446 VMA_HEAVY_ASSERT(pBlock->Validate());
5448 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5451 if(pBlock->IsEmpty())
// Keep at most one empty block cached; delete extras above the minimum.
5454 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5456 pBlockToDelete = pBlock;
5462 m_HasEmptyBlock =
true;
5466 IncrementallySortBlocks();
// Destroy outside the lock — it calls into Vulkan and may be slow.
5471 if(pBlockToDelete != VMA_NULL)
5473 VMA_DEBUG_LOG(
" Deleted empty allocation");
5474 pBlockToDelete->Destroy(m_hAllocator);
5475 vma_delete(m_hAllocator, pBlockToDelete);
// Removes the given block pointer from m_Blocks (linear search; the block is
// not destroyed here).
5479 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5481 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5483 if(m_Blocks[blockIndex] == pBlock)
5485 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass over m_Blocks ordering by ascending m_SumFreeSize, so
// fuller blocks are tried first by Allocate. Called after each free; repeated
// calls converge to sorted order without a full sort per operation.
5492 void VmaBlockVector::IncrementallySortBlocks()
5495 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5497 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
5499 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// maps it persistently when the vector is of MAPPED type (and persistent
// mapping is not globally suspended), wraps it in a VmaDeviceMemoryBlock and
// appends it to m_Blocks. Optionally returns the new block's index.
5505 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5507 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5508 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5509 allocInfo.allocationSize = blockSize;
5510 VkDeviceMemory mem = VK_NULL_HANDLE;
5511 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5520 void* pMappedData = VMA_NULL;
5521 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
// Counter != 0 means persistent mappings are temporarily unmapped app-wide.
5522 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5524 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5525 m_hAllocator->m_hDevice,
// On map failure the freshly allocated memory is released again.
5533 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5534 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5540 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5543 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5545 allocInfo.allocationSize,
5549 m_Blocks.push_back(pBlock);
5550 if(pNewBlockIndex != VMA_NULL)
5552 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON: memory type, mapped flag, block-size
// configuration (custom pools) or preferred block size (default pools),
// frame-in-use count, then each block's detailed map. Takes the vector mutex.
5558 #if VMA_STATS_STRING_ENABLED 5560 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5562 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5568 json.WriteString(
"MemoryTypeIndex");
5569 json.WriteNumber(m_MemoryTypeIndex);
5571 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5573 json.WriteString(
"Mapped");
5574 json.WriteBool(
true);
// Custom-pool branch: explicit block size and count limits.
5577 json.WriteString(
"BlockSize");
5578 json.WriteNumber(m_PreferredBlockSize);
5580 json.WriteString(
"BlockCount");
5581 json.BeginObject(
true);
5582 if(m_MinBlockCount > 0)
5584 json.WriteString(
"Min");
5585 json.WriteNumber(m_MinBlockCount);
5587 if(m_MaxBlockCount < SIZE_MAX)
5589 json.WriteString(
"Max");
5590 json.WriteNumber(m_MaxBlockCount);
5592 json.WriteString(
"Cur");
5593 json.WriteNumber(m_Blocks.size());
5596 if(m_FrameInUseCount > 0)
5598 json.WriteString(
"FrameInUseCount");
5599 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size is reported.
5604 json.WriteString(
"PreferredBlockSize");
5605 json.WriteNumber(m_PreferredBlockSize);
5608 json.WriteString(
"Blocks");
5610 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5612 m_Blocks[i]->PrintDetailedMap(json);
// Unmaps every currently mapped block (used when persistent mappings must be
// suspended, e.g. around device loss handling). Takes the vector mutex.
5619 #endif // #if VMA_STATS_STRING_ENABLED 5621 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5623 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5625 for(
size_t i = m_Blocks.size(); i--; )
5627 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5628 if(pBlock->m_pMappedData != VMA_NULL)
// Only persistently mapped blocks may have a non-null mapped pointer here.
5630 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5631 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5632 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every persistently-mapped block after a prior
// UnmapPersistentlyMappedMemory(). Continues past individual failures and
// returns the last failing VkResult (VK_SUCCESS if all succeed).
5637 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5639 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5641 VkResult finalResult = VK_SUCCESS;
5642 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5644 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5645 if(pBlock->m_PersistentMap)
5647 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5648 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5649 m_hAllocator->m_hDevice,
5654 &pBlock->m_pMappedData);
5655 if(localResult != VK_SUCCESS)
5657 finalResult = localResult;
// Lazily creates (once) and returns this vector's defragmentator.
5664 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5665 VmaAllocator hAllocator,
5666 uint32_t currentFrameIndex)
5668 if(m_pDefragmentator == VMA_NULL)
5670 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5676 return m_pDefragmentator;
// Runs the defragmentator (created via EnsureDefragmentator) under the vector
// mutex, accumulates moved-bytes/moved-allocation stats, then destroys and
// frees any blocks that became empty (above the minimum count), crediting
// their size to pDefragmentationStats->bytesFreed.
5679 VkResult VmaBlockVector::Defragment(
5681 VkDeviceSize& maxBytesToMove,
5682 uint32_t& maxAllocationsToMove)
5684 if(m_pDefragmentator == VMA_NULL)
5689 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5692 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5695 if(pDefragmentationStats != VMA_NULL)
5697 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5698 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must have respected the caller's budgets.
5701 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5702 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Recompute the empty-block cache while destroying surplus empty blocks.
5708 m_HasEmptyBlock =
false;
5709 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5711 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5712 if(pBlock->IsEmpty())
5714 if(m_Blocks.size() > m_MinBlockCount)
5716 if(pDefragmentationStats != VMA_NULL)
5719 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5722 VmaVectorRemove(m_Blocks, blockIndex);
5723 pBlock->Destroy(m_hAllocator);
5724 vma_delete(m_hAllocator, pBlock);
// Below the minimum count: keep the empty block and remember we have one.
5728 m_HasEmptyBlock =
true;
// Deletes the defragmentator if one exists and resets the pointer.
5736 void VmaBlockVector::DestroyDefragmentator()
5738 if(m_pDefragmentator != VMA_NULL)
5740 vma_delete(m_hAllocator, m_pDefragmentator);
5741 m_pDefragmentator = VMA_NULL;
// Makes lost all eligible allocations in every block of this (pool's) vector.
// Per-block lost counts are produced by MakeAllocationsLost; the accumulation
// into *pLostAllocationCount was dropped by extraction — verify upstream.
5745 void VmaBlockVector::MakePoolAllocationsLost(
5746 uint32_t currentFrameIndex,
5747 size_t* pLostAllocationCount)
5749 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5751 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5753 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5755 pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Adds this vector's per-block statistics into the global VmaStats: each
// block's VmaStatInfo is merged into the total, its memory type's entry, and
// its memory heap's entry. Takes the vector mutex.
5759 void VmaBlockVector::AddStats(
VmaStats* pStats)
5761 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5762 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5764 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5766 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5768 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5770 VMA_HEAVY_ASSERT(pBlock->Validate());
5772 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
5773 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5774 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5775 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Defragmentator constructor: binds to one block vector and frame index,
// zeroes the moved-allocations counter, and initializes the allocation/block
// bookkeeping vectors with the allocator's callbacks.
5782 VmaDefragmentator::VmaDefragmentator(
5783 VmaAllocator hAllocator,
5784 VmaBlockVector* pBlockVector,
5785 uint32_t currentFrameIndex) :
5786 m_hAllocator(hAllocator),
5787 m_pBlockVector(pBlockVector),
5788 m_CurrentFrameIndex(currentFrameIndex),
5790 m_AllocationsMoved(0),
5791 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5792 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: deletes the per-block BlockInfo objects owned by m_Blocks.
5796 VmaDefragmentator::~VmaDefragmentator()
5798 for(
size_t i = m_Blocks.size(); i--; )
5800 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate. pChanged (may be
// null) will be set to VK_TRUE if the allocation is later moved.
5804 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5806 AllocationInfo allocInfo;
5807 allocInfo.m_hAllocation = hAlloc;
5808 allocInfo.m_pChanged = pChanged;
5809 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory for copying during
// defragmentation. Reuses, in order: an existing defrag mapping, the block's
// persistent mapping, or a fresh vkMapMemory whose pointer is cached in
// m_pMappedDataForDefragmentation (unmapped later by Unmap()).
5812 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5815 if(m_pMappedDataForDefragmentation)
5817 *ppMappedData = m_pMappedDataForDefragmentation;
5822 if(m_pBlock->m_PersistentMap)
5824 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5825 *ppMappedData = m_pBlock->m_pMappedData;
// Map on demand; the result is cached for subsequent calls.
5830 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5831 hAllocator->m_hDevice,
5832 m_pBlock->m_hMemory,
5836 &m_pMappedDataForDefragmentation);
5837 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmaps the block only if EnsureMapping created a defrag-specific mapping;
// persistent mappings are left untouched.
5841 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5843 if(m_pMappedDataForDefragmentation != VMA_NULL)
5845 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One round of defragmentation: repeatedly takes the last allocation of the
// last (most-free) block and tries to move it into an earlier block (or an
// earlier offset) via memcpy between mapped pointers, respecting the
// maxBytesToMove / maxAllocationsToMove budgets. Returns VK_INCOMPLETE when
// a budget would be exceeded.
// NOTE(review): extraction dropped loop braces and some advance/termination
// lines — confirm the iteration structure against the full source.
5849 VkResult VmaDefragmentator::DefragmentRound(
5850 VkDeviceSize maxBytesToMove,
5851 uint32_t maxAllocationsToMove)
5853 if(m_Blocks.empty())
// Cursor over (block, allocation) starting at the very last allocation.
5858 size_t srcBlockIndex = m_Blocks.size() - 1;
5859 size_t srcAllocIndex = SIZE_MAX;
// Find the next source allocation, walking to earlier blocks when the
// current one is exhausted; srcBlockIndex == 0 means nothing left to move.
5865 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5867 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
5870 if(srcBlockIndex == 0)
5877 srcAllocIndex = SIZE_MAX;
5882 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5886 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5887 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5889 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5890 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5891 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5892 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
5895 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5897 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5898 VmaAllocationRequest dstAllocRequest;
5899 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
5900 m_CurrentFrameIndex,
5901 m_pBlockVector->GetFrameInUseCount(),
5902 m_pBlockVector->GetBufferImageGranularity(),
5907 &dstAllocRequest) &&
// MoveMakesSense rejects moves that don't shrink fragmentation.
5909 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5911 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
5914 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5915 (m_BytesMoved + size > maxBytesToMove))
5917 return VK_INCOMPLETE;
5920 void* pDstMappedData = VMA_NULL;
5921 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
5922 if(res != VK_SUCCESS)
5927 void* pSrcMappedData = VMA_NULL;
5928 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
5929 if(res != VK_SUCCESS)
// Raw byte copy between the mapped block memories.
5936 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
5937 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
5938 static_cast<size_t>(size));
5940 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
5941 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
5943 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
5945 if(allocInfo.m_pChanged != VMA_NULL)
5947 *allocInfo.m_pChanged = VK_TRUE;
5950 ++m_AllocationsMoved;
5951 m_BytesMoved += size;
5953 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found: advance the cursor to an earlier allocation/block.
5961 if(srcAllocIndex > 0)
5967 if(srcBlockIndex > 0)
5970 srcAllocIndex = SIZE_MAX;
// Main defragmentation entry: builds per-block BlockInfo records, distributes
// the registered (non-lost) allocations to their blocks, sorts allocations by
// descending size and blocks by move-destination preference, runs up to two
// rounds of DefragmentRound, then unmaps any defrag-created mappings.
5980 VkResult VmaDefragmentator::Defragment(
5981 VkDeviceSize maxBytesToMove,
5982 uint32_t maxAllocationsToMove)
5984 if(m_Allocations.empty())
// One BlockInfo per block in the underlying vector.
5990 const size_t blockCount = m_pBlockVector->m_Blocks.size();
5991 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5993 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
5994 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
5995 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be routed by binary search.
5999 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6002 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6004 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped — they no longer occupy memory.
6006 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6008 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6009 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6010 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6012 (*it)->m_Allocations.push_back(allocInfo);
6020 m_Allocations.clear();
6022 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6024 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6025 pBlockInfo->CalcHasNonMovableAllocations();
6026 pBlockInfo->SortAllocationsBySizeDescecnding();
// Prefer destinations that already hold non-movable data / are fuller.
6030 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6033 VkResult result = VK_SUCCESS;
6034 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6036 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6040 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6042 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// A move is worthwhile only if it lands in an earlier block, or in the same
// block at a lower offset (compacting toward the front).
6048 bool VmaDefragmentator::MoveMakesSense(
6049 size_t dstBlockIndex, VkDeviceSize dstOffset,
6050 size_t srcBlockIndex, VkDeviceSize srcOffset)
6052 if(dstBlockIndex < srcBlockIndex)
6056 if(dstBlockIndex > srcBlockIndex)
6060 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (signature line dropped by extraction; this is
// its initializer list and body). Copies the create-info configuration,
// zeroes all cached state, imports Vulkan function pointers, queries physical
// device and memory properties, applies optional per-heap size limits, and
// creates one block vector + own-allocation vector per (memory type, block
// vector type) pair.
6072 m_PhysicalDevice(pCreateInfo->physicalDevice),
6073 m_hDevice(pCreateInfo->device),
6074 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6075 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6076 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6077 m_UnmapPersistentlyMappedMemoryCounter(0),
6078 m_PreferredLargeHeapBlockSize(0),
6079 m_PreferredSmallHeapBlockSize(0),
6080 m_CurrentFrameIndex(0),
6081 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all aggregate members before they are selectively filled below.
6085 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6086 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6087 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6089 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6090 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
6092 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6094 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6105 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6106 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Optional pHeapSizeLimit: clamp reported heap sizes to the user's limits.
6115 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6117 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6118 if(limit != VK_WHOLE_SIZE)
6120 m_HeapSizeLimit[heapIndex] = limit;
6121 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6123 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector per memory type and vector type (mapped/unmapped).
6129 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6131 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6133 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6135 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6138 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6142 GetBufferImageGranularity(),
6147 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Allocator destructor: all user pools must be destroyed first; then every
// per-(memory type, vector type) own-allocation vector and block vector is
// deleted, iterating backwards.
6152 VmaAllocator_T::~VmaAllocator_T()
6154 VMA_ASSERT(m_Pools.empty());
6156 for(
size_t i = GetMemoryTypeCount(); i--; )
6158 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6160 vma_delete(
this, m_pOwnAllocations[i][j]);
6161 vma_delete(
this, m_pBlockVectors[i][j]);
// Fills m_VulkanFunctions: statically linked entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1), then overridden wholesale by the user's
// VmaVulkanFunctions if provided, and finally validated to be non-null.
// (Function continues past this view — more asserts follow.)
6166 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6168 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6169 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6170 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6171 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6172 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6173 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6174 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6175 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6176 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6177 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6178 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6179 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6180 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6181 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6182 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// User-supplied table replaces everything set above.
6183 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6185 if(pVulkanFunctions != VMA_NULL)
6187 m_VulkanFunctions = *pVulkanFunctions;
// Whatever the source, every required function pointer must be set.
6192 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6193 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6194 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6195 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6196 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6197 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6198 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6199 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6200 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6201 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6202 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6203 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6204 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6205 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6208 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6210 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6211 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6212 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6213 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6216 VkResult VmaAllocator_T::AllocateMemoryOfType(
6217 const VkMemoryRequirements& vkMemReq,
6219 uint32_t memTypeIndex,
6220 VmaSuballocationType suballocType,
6221 VmaAllocation* pAllocation)
6223 VMA_ASSERT(pAllocation != VMA_NULL);
6224 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6226 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6227 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6228 VMA_ASSERT(blockVector);
6230 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6232 const bool ownMemory =
6234 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6236 vkMemReq.size > preferredBlockSize / 2);
6242 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6246 return AllocateOwnMemory(
6257 VkResult res = blockVector->Allocate(
6259 m_CurrentFrameIndex.load(),
6264 if(res == VK_SUCCESS)
6270 res = AllocateOwnMemory(
6275 createInfo.pUserData,
6277 if(res == VK_SUCCESS)
6280 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6286 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6292 VkResult VmaAllocator_T::AllocateOwnMemory(
6294 VmaSuballocationType suballocType,
6295 uint32_t memTypeIndex,
6298 VmaAllocation* pAllocation)
6300 VMA_ASSERT(pAllocation);
6302 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6303 allocInfo.memoryTypeIndex = memTypeIndex;
6304 allocInfo.allocationSize = size;
6307 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6308 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6311 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6315 void* pMappedData =
nullptr;
6318 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6320 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
6323 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6324 FreeVulkanMemory(memTypeIndex, size, hMemory);
6330 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6331 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6335 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6336 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6337 VMA_ASSERT(pOwnAllocations);
6338 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6341 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6346 VkResult VmaAllocator_T::AllocateMemory(
6347 const VkMemoryRequirements& vkMemReq,
6349 VmaSuballocationType suballocType,
6350 VmaAllocation* pAllocation)
6355 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6356 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6358 if((createInfo.
pool != VK_NULL_HANDLE) &&
6361 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6362 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6365 if(createInfo.
pool != VK_NULL_HANDLE)
6367 return createInfo.
pool->m_BlockVector.Allocate(
6369 m_CurrentFrameIndex.load(),
6378 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6379 uint32_t memTypeIndex = UINT32_MAX;
6381 if(res == VK_SUCCESS)
6383 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6385 if(res == VK_SUCCESS)
6395 memoryTypeBits &= ~(1u << memTypeIndex);
6398 if(res == VK_SUCCESS)
6400 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6402 if(res == VK_SUCCESS)
6412 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6423 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6425 VMA_ASSERT(allocation);
6427 if(allocation->CanBecomeLost() ==
false ||
6428 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6430 switch(allocation->GetType())
6432 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6434 VmaBlockVector* pBlockVector = VMA_NULL;
6435 VmaPool hPool = allocation->GetPool();
6436 if(hPool != VK_NULL_HANDLE)
6438 pBlockVector = &hPool->m_BlockVector;
6442 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6443 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6444 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6446 pBlockVector->Free(allocation);
6449 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6450 FreeOwnMemory(allocation);
6457 vma_delete(
this, allocation);
6460 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6463 InitStatInfo(pStats->
total);
6464 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6466 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6470 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6472 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6473 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6475 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6476 VMA_ASSERT(pBlockVector);
6477 pBlockVector->AddStats(pStats);
6483 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6484 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6486 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6491 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6493 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6494 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6495 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6497 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6498 VMA_ASSERT(pOwnAllocVector);
6499 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6502 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6503 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6504 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6505 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6511 VmaPostprocessCalcStatInfo(pStats->
total);
6512 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6513 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6514 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6515 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6518 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6520 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6522 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6524 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6526 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6528 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6529 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6530 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6534 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6535 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6536 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6538 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6539 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
6545 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6546 pBlockVector->UnmapPersistentlyMappedMemory();
6553 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6554 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6556 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6563 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6565 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6566 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6568 VkResult finalResult = VK_SUCCESS;
6569 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6573 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6574 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6576 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6580 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6582 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6583 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6584 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6588 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6589 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6590 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6592 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6593 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
6599 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6600 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6601 if(localResult != VK_SUCCESS)
6603 finalResult = localResult;
6615 VkResult VmaAllocator_T::Defragment(
6616 VmaAllocation* pAllocations,
6617 size_t allocationCount,
6618 VkBool32* pAllocationsChanged,
6622 if(pAllocationsChanged != VMA_NULL)
6624 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6626 if(pDefragmentationStats != VMA_NULL)
6628 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6631 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6633 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6634 return VK_ERROR_MEMORY_MAP_FAILED;
6637 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6639 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6641 const size_t poolCount = m_Pools.size();
6644 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6646 VmaAllocation hAlloc = pAllocations[allocIndex];
6648 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6650 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6652 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6654 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6656 VmaBlockVector* pAllocBlockVector =
nullptr;
6658 const VmaPool hAllocPool = hAlloc->GetPool();
6660 if(hAllocPool != VK_NULL_HANDLE)
6662 pAllocBlockVector = &hAllocPool->GetBlockVector();
6667 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6670 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6672 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6673 &pAllocationsChanged[allocIndex] : VMA_NULL;
6674 pDefragmentator->AddAllocation(hAlloc, pChanged);
6678 VkResult result = VK_SUCCESS;
6682 VkDeviceSize maxBytesToMove = SIZE_MAX;
6683 uint32_t maxAllocationsToMove = UINT32_MAX;
6684 if(pDefragmentationInfo != VMA_NULL)
6691 for(uint32_t memTypeIndex = 0;
6692 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6696 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6698 for(uint32_t blockVectorType = 0;
6699 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6702 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6703 pDefragmentationStats,
6705 maxAllocationsToMove);
6711 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6713 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6714 pDefragmentationStats,
6716 maxAllocationsToMove);
6722 for(
size_t poolIndex = poolCount; poolIndex--; )
6724 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6728 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6730 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6732 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6734 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6742 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6744 if(hAllocation->CanBecomeLost())
6750 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6751 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6754 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6758 pAllocationInfo->
offset = 0;
6759 pAllocationInfo->
size = hAllocation->GetSize();
6761 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6764 else if(localLastUseFrameIndex == localCurrFrameIndex)
6766 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6767 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6768 pAllocationInfo->
offset = hAllocation->GetOffset();
6769 pAllocationInfo->
size = hAllocation->GetSize();
6770 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6771 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6776 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6778 localLastUseFrameIndex = localCurrFrameIndex;
6786 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6787 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6788 pAllocationInfo->
offset = hAllocation->GetOffset();
6789 pAllocationInfo->
size = hAllocation->GetSize();
6790 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6791 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6795 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6797 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6810 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6812 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6813 if(res != VK_SUCCESS)
6815 vma_delete(
this, *pPool);
6822 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6823 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6829 void VmaAllocator_T::DestroyPool(VmaPool pool)
6833 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6834 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6835 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6838 vma_delete(
this, pool);
6841 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6843 pool->m_BlockVector.GetPoolStats(pPoolStats);
6846 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6848 m_CurrentFrameIndex.store(frameIndex);
6851 void VmaAllocator_T::MakePoolAllocationsLost(
6853 size_t* pLostAllocationCount)
6855 hPool->m_BlockVector.MakePoolAllocationsLost(
6856 m_CurrentFrameIndex.load(),
6857 pLostAllocationCount);
6860 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6862 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6863 (*pAllocation)->InitLost();
6866 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6868 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6871 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6873 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6874 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6876 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6877 if(res == VK_SUCCESS)
6879 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
6884 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6889 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6892 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6894 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
6900 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6902 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6904 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6907 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
6909 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
6910 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6912 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6913 m_HeapSizeLimit[heapIndex] += size;
6917 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6919 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6921 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6923 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6924 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
6925 VMA_ASSERT(pOwnAllocations);
6926 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
6927 VMA_ASSERT(success);
6930 VkDeviceMemory hMemory = allocation->GetMemory();
6932 if(allocation->GetMappedData() != VMA_NULL)
6934 vkUnmapMemory(m_hDevice, hMemory);
6937 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
6939 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
6942 #if VMA_STATS_STRING_ENABLED 6944 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
6946 bool ownAllocationsStarted =
false;
6947 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6949 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6950 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6952 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6953 VMA_ASSERT(pOwnAllocVector);
6954 if(pOwnAllocVector->empty() ==
false)
6956 if(ownAllocationsStarted ==
false)
6958 ownAllocationsStarted =
true;
6959 json.WriteString(
"OwnAllocations");
6963 json.BeginString(
"Type ");
6964 json.ContinueString(memTypeIndex);
6965 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6967 json.ContinueString(
" Mapped");
6973 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
6975 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
6976 json.BeginObject(
true);
6978 json.WriteString(
"Size");
6979 json.WriteNumber(hAlloc->GetSize());
6981 json.WriteString(
"Type");
6982 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
6991 if(ownAllocationsStarted)
6997 bool allocationsStarted =
false;
6998 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7000 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7002 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7004 if(allocationsStarted ==
false)
7006 allocationsStarted =
true;
7007 json.WriteString(
"DefaultPools");
7011 json.BeginString(
"Type ");
7012 json.ContinueString(memTypeIndex);
7013 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7015 json.ContinueString(
" Mapped");
7019 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7023 if(allocationsStarted)
7030 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7031 const size_t poolCount = m_Pools.size();
7034 json.WriteString(
"Pools");
7036 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7038 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7045 #endif // #if VMA_STATS_STRING_ENABLED 7047 static VkResult AllocateMemoryForImage(
7048 VmaAllocator allocator,
7051 VmaSuballocationType suballocType,
7052 VmaAllocation* pAllocation)
7054 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7056 VkMemoryRequirements vkMemReq = {};
7057 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7059 return allocator->AllocateMemory(
7061 *pAllocationCreateInfo,
7071 VmaAllocator* pAllocator)
7073 VMA_ASSERT(pCreateInfo && pAllocator);
7074 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7080 VmaAllocator allocator)
7082 if(allocator != VK_NULL_HANDLE)
7084 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7085 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7086 vma_delete(&allocationCallbacks, allocator);
7091 VmaAllocator allocator,
7092 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7094 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7095 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7099 VmaAllocator allocator,
7100 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7102 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7103 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7107 VmaAllocator allocator,
7108 uint32_t memoryTypeIndex,
7109 VkMemoryPropertyFlags* pFlags)
7111 VMA_ASSERT(allocator && pFlags);
7112 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7113 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7117 VmaAllocator allocator,
7118 uint32_t frameIndex)
7120 VMA_ASSERT(allocator);
7121 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7123 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7125 allocator->SetCurrentFrameIndex(frameIndex);
7129 VmaAllocator allocator,
7132 VMA_ASSERT(allocator && pStats);
7133 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7134 allocator->CalculateStats(pStats);
7137 #if VMA_STATS_STRING_ENABLED 7140 VmaAllocator allocator,
7141 char** ppStatsString,
7142 VkBool32 detailedMap)
7144 VMA_ASSERT(allocator && ppStatsString);
7145 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7147 VmaStringBuilder sb(allocator);
7149 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7153 allocator->CalculateStats(&stats);
7155 json.WriteString(
"Total");
7156 VmaPrintStatInfo(json, stats.
total);
7158 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7160 json.BeginString(
"Heap ");
7161 json.ContinueString(heapIndex);
7165 json.WriteString(
"Size");
7166 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7168 json.WriteString(
"Flags");
7169 json.BeginArray(
true);
7170 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7172 json.WriteString(
"DEVICE_LOCAL");
7178 json.WriteString(
"Stats");
7179 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7182 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7184 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7186 json.BeginString(
"Type ");
7187 json.ContinueString(typeIndex);
7192 json.WriteString(
"Flags");
7193 json.BeginArray(
true);
7194 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7195 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7197 json.WriteString(
"DEVICE_LOCAL");
7199 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7201 json.WriteString(
"HOST_VISIBLE");
7203 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7205 json.WriteString(
"HOST_COHERENT");
7207 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7209 json.WriteString(
"HOST_CACHED");
7211 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7213 json.WriteString(
"LAZILY_ALLOCATED");
7219 json.WriteString(
"Stats");
7220 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7229 if(detailedMap == VK_TRUE)
7231 allocator->PrintDetailedMap(json);
7237 const size_t len = sb.GetLength();
7238 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7241 memcpy(pChars, sb.GetData(), len);
7244 *ppStatsString = pChars;
7248 VmaAllocator allocator,
7251 if(pStatsString != VMA_NULL)
7253 VMA_ASSERT(allocator);
7254 size_t len = strlen(pStatsString);
7255 vma_delete_array(allocator, pStatsString, len + 1);
7259 #endif // #if VMA_STATS_STRING_ENABLED 7264 VmaAllocator allocator,
7265 uint32_t memoryTypeBits,
7267 uint32_t* pMemoryTypeIndex)
7269 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7270 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7271 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7273 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7275 if(preferredFlags == 0)
7277 preferredFlags = requiredFlags;
7280 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7283 switch(pAllocationCreateInfo->
usage)
7288 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7291 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7294 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7295 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7298 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7299 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7307 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7310 *pMemoryTypeIndex = UINT32_MAX;
7311 uint32_t minCost = UINT32_MAX;
7312 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7313 memTypeIndex < allocator->GetMemoryTypeCount();
7314 ++memTypeIndex, memTypeBit <<= 1)
7317 if((memTypeBit & memoryTypeBits) != 0)
7319 const VkMemoryPropertyFlags currFlags =
7320 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7322 if((requiredFlags & ~currFlags) == 0)
7325 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7327 if(currCost < minCost)
7329 *pMemoryTypeIndex = memTypeIndex;
7339 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7343 VmaAllocator allocator,
7347 VMA_ASSERT(allocator && pCreateInfo && pPool);
7349 VMA_DEBUG_LOG(
"vmaCreatePool");
7351 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7353 return allocator->CreatePool(pCreateInfo, pPool);
7357 VmaAllocator allocator,
7360 VMA_ASSERT(allocator && pool);
7362 VMA_DEBUG_LOG(
"vmaDestroyPool");
7364 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7366 allocator->DestroyPool(pool);
7370 VmaAllocator allocator,
7374 VMA_ASSERT(allocator && pool && pPoolStats);
7376 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7378 allocator->GetPoolStats(pool, pPoolStats);
7382 VmaAllocator allocator,
7384 size_t* pLostAllocationCount)
7386 VMA_ASSERT(allocator && pool);
7388 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7390 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7394 VmaAllocator allocator,
7395 const VkMemoryRequirements* pVkMemoryRequirements,
7397 VmaAllocation* pAllocation,
7400 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7402 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7404 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7406 VkResult result = allocator->AllocateMemory(
7407 *pVkMemoryRequirements,
7409 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7412 if(pAllocationInfo && result == VK_SUCCESS)
7414 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7421 VmaAllocator allocator,
7424 VmaAllocation* pAllocation,
7427 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7429 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7431 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7433 VkMemoryRequirements vkMemReq = {};
7434 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7436 VkResult result = allocator->AllocateMemory(
7439 VMA_SUBALLOCATION_TYPE_BUFFER,
7442 if(pAllocationInfo && result == VK_SUCCESS)
7444 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7451 VmaAllocator allocator,
7454 VmaAllocation* pAllocation,
7457 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7459 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7461 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7463 VkResult result = AllocateMemoryForImage(
7467 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7470 if(pAllocationInfo && result == VK_SUCCESS)
7472 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7479 VmaAllocator allocator,
7480 VmaAllocation allocation)
7482 VMA_ASSERT(allocator && allocation);
7484 VMA_DEBUG_LOG(
"vmaFreeMemory");
7486 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7488 allocator->FreeMemory(allocation);
7492 VmaAllocator allocator,
7493 VmaAllocation allocation,
7496 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7498 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7500 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7504 VmaAllocator allocator,
7505 VmaAllocation allocation,
7508 VMA_ASSERT(allocator && allocation);
7510 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7512 allocation->SetUserData(pUserData);
7516 VmaAllocator allocator,
7517 VmaAllocation* pAllocation)
7519 VMA_ASSERT(allocator && pAllocation);
7521 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7523 allocator->CreateLostAllocation(pAllocation);
7527 VmaAllocator allocator,
7528 VmaAllocation allocation,
7531 VMA_ASSERT(allocator && allocation && ppData);
7533 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7535 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7536 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
7540 VmaAllocator allocator,
7541 VmaAllocation allocation)
7543 VMA_ASSERT(allocator && allocation);
7545 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7547 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
7552 VMA_ASSERT(allocator);
7554 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7556 allocator->UnmapPersistentlyMappedMemory();
7561 VMA_ASSERT(allocator);
7563 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7565 return allocator->MapPersistentlyMappedMemory();
7569 VmaAllocator allocator,
7570 VmaAllocation* pAllocations,
7571 size_t allocationCount,
7572 VkBool32* pAllocationsChanged,
7576 VMA_ASSERT(allocator && pAllocations);
7578 VMA_DEBUG_LOG(
"vmaDefragment");
7580 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7582 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7586 VmaAllocator allocator,
7587 const VkBufferCreateInfo* pBufferCreateInfo,
7590 VmaAllocation* pAllocation,
7593 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7595 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7599 *pBuffer = VK_NULL_HANDLE;
7600 *pAllocation = VK_NULL_HANDLE;
7603 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7604 allocator->m_hDevice,
7606 allocator->GetAllocationCallbacks(),
7611 VkMemoryRequirements vkMemReq = {};
7612 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7615 res = allocator->AllocateMemory(
7617 *pAllocationCreateInfo,
7618 VMA_SUBALLOCATION_TYPE_BUFFER,
7623 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7624 allocator->m_hDevice,
7626 (*pAllocation)->GetMemory(),
7627 (*pAllocation)->GetOffset());
7631 if(pAllocationInfo != VMA_NULL)
7633 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7637 allocator->FreeMemory(*pAllocation);
7638 *pAllocation = VK_NULL_HANDLE;
7641 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7642 *pBuffer = VK_NULL_HANDLE;
7649 VmaAllocator allocator,
7651 VmaAllocation allocation)
7653 if(buffer != VK_NULL_HANDLE)
7655 VMA_ASSERT(allocator);
7657 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7659 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7661 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7663 allocator->FreeMemory(allocation);
7668 VmaAllocator allocator,
7669 const VkImageCreateInfo* pImageCreateInfo,
7672 VmaAllocation* pAllocation,
7675 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7677 VMA_DEBUG_LOG(
"vmaCreateImage");
7679 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7681 *pImage = VK_NULL_HANDLE;
7682 *pAllocation = VK_NULL_HANDLE;
7685 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7686 allocator->m_hDevice,
7688 allocator->GetAllocationCallbacks(),
7692 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7693 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7694 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7697 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7701 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7702 allocator->m_hDevice,
7704 (*pAllocation)->GetMemory(),
7705 (*pAllocation)->GetOffset());
7709 if(pAllocationInfo != VMA_NULL)
7711 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7715 allocator->FreeMemory(*pAllocation);
7716 *pAllocation = VK_NULL_HANDLE;
7719 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7720 *pImage = VK_NULL_HANDLE;
7727 VmaAllocator allocator,
7729 VmaAllocation allocation)
7731 if(image != VK_NULL_HANDLE)
7733 VMA_ASSERT(allocator);
7735 VMA_DEBUG_LOG(
"vmaDestroyImage");
7737 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7739 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7741 allocator->FreeMemory(allocation);
7745 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:434
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:457
Definition: vk_mem_alloc.h:786
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:570
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:444
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:637
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:438
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:914
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1067
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:838
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:686
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:719
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:403
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:469
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:788
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:516
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:451
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:466
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:441
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:431
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1071
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:486
VmaStatInfo total
Definition: vk_mem_alloc.h:588
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1079
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:702
VkDeviceSize maxBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1062
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:442
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:460
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:792
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:924
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:439
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:721
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:808
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:844
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:795
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
struct VmaVulkanFunctions VmaVulkanFunctions
Definition: vk_mem_alloc.h:695
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1057
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:579
Definition: vk_mem_alloc.h:766
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1075
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:440
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:584
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:675
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1077
VmaMemoryUsage
Definition: vk_mem_alloc.h:623
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:713
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:427
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:422
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:854
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:435
Definition: vk_mem_alloc.h:567
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:803
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:414
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:418
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:798
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:580
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:397
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:708
Definition: vk_mem_alloc.h:699
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:437
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:816
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:579
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:472
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:847
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:726
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:504
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:586
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:579
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:446
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:572
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:416
Definition: vk_mem_alloc.h:693
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:445
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:830
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:454
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:576
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:935
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:654
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:463
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:574
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:835
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:631
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:580
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:919
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1073
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Definition: vk_mem_alloc.h:433
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:697
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:443
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:447
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:757
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:930
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
No intended memory usage specified.
Definition: vk_mem_alloc.h:626
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:436
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
Definition: vk_mem_alloc.h:638
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:900
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:634
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:642
Definition: vk_mem_alloc.h:429
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:665
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:628
struct VmaStatInfo VmaStatInfo
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:578
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:587
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:841
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:784
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:580
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:905
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.