23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 393 #include <vulkan/vulkan.h> 400 VK_DEFINE_HANDLE(VmaAllocator)
404 VmaAllocator allocator,
406 VkDeviceMemory memory,
410 VmaAllocator allocator,
412 VkDeviceMemory memory,
534 VmaAllocator* pAllocator);
538 VmaAllocator allocator);
545 VmaAllocator allocator,
546 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
553 VmaAllocator allocator,
554 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
563 VmaAllocator allocator,
564 uint32_t memoryTypeIndex,
565 VkMemoryPropertyFlags* pFlags);
576 VmaAllocator allocator,
577 uint32_t frameIndex);
607 VmaAllocator allocator,
610 #define VMA_STATS_STRING_ENABLED 1 612 #if VMA_STATS_STRING_ENABLED 618 VmaAllocator allocator,
619 char** ppStatsString,
620 VkBool32 detailedMap);
623 VmaAllocator allocator,
626 #endif // #if VMA_STATS_STRING_ENABLED 635 VK_DEFINE_HANDLE(VmaPool)
758 VmaAllocator allocator,
759 uint32_t memoryTypeBits,
761 uint32_t* pMemoryTypeIndex);
878 VmaAllocator allocator,
885 VmaAllocator allocator,
895 VmaAllocator allocator,
906 VmaAllocator allocator,
908 size_t* pLostAllocationCount);
910 VK_DEFINE_HANDLE(VmaAllocation)
963 VmaAllocator allocator,
964 const VkMemoryRequirements* pVkMemoryRequirements,
966 VmaAllocation* pAllocation,
976 VmaAllocator allocator,
979 VmaAllocation* pAllocation,
984 VmaAllocator allocator,
987 VmaAllocation* pAllocation,
992 VmaAllocator allocator,
993 VmaAllocation allocation);
997 VmaAllocator allocator,
998 VmaAllocation allocation,
1003 VmaAllocator allocator,
1004 VmaAllocation allocation,
1018 VmaAllocator allocator,
1019 VmaAllocation* pAllocation);
1030 VmaAllocator allocator,
1031 VmaAllocation allocation,
1035 VmaAllocator allocator,
1036 VmaAllocation allocation);
1167 VmaAllocator allocator,
1168 VmaAllocation* pAllocations,
1169 size_t allocationCount,
1170 VkBool32* pAllocationsChanged,
1200 VmaAllocator allocator,
1201 const VkBufferCreateInfo* pBufferCreateInfo,
1204 VmaAllocation* pAllocation,
1216 VmaAllocator allocator,
1218 VmaAllocation allocation);
1222 VmaAllocator allocator,
1223 const VkImageCreateInfo* pImageCreateInfo,
1226 VmaAllocation* pAllocation,
1238 VmaAllocator allocator,
1240 VmaAllocation allocation);
1248 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1251 #ifdef __INTELLISENSE__ 1252 #define VMA_IMPLEMENTATION 1255 #ifdef VMA_IMPLEMENTATION 1256 #undef VMA_IMPLEMENTATION 1278 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1279 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1291 #if VMA_USE_STL_CONTAINERS 1292 #define VMA_USE_STL_VECTOR 1 1293 #define VMA_USE_STL_UNORDERED_MAP 1 1294 #define VMA_USE_STL_LIST 1 1297 #if VMA_USE_STL_VECTOR 1301 #if VMA_USE_STL_UNORDERED_MAP 1302 #include <unordered_map> 1305 #if VMA_USE_STL_LIST 1314 #include <algorithm> 1318 #if !defined(_WIN32) 1325 #define VMA_ASSERT(expr) assert(expr) 1327 #define VMA_ASSERT(expr) 1333 #ifndef VMA_HEAVY_ASSERT 1335 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1337 #define VMA_HEAVY_ASSERT(expr) 1343 #define VMA_NULL nullptr 1346 #ifndef VMA_ALIGN_OF 1347 #define VMA_ALIGN_OF(type) (__alignof(type)) 1350 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1352 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1354 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1358 #ifndef VMA_SYSTEM_FREE 1360 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1362 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1367 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1371 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1375 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1379 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1382 #ifndef VMA_DEBUG_LOG 1383 #define VMA_DEBUG_LOG(format, ...) 1393 #if VMA_STATS_STRING_ENABLED 1394 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1396 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of a 64-bit unsigned integer into the
// caller-provided buffer. Output is truncated if strLen is too small
// (snprintf semantics); the buffer is always NUL-terminated for strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long printable = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", printable);
}
// Formats a pointer value into the caller-provided buffer using the
// platform's "%p" representation; output is truncated to strLen
// (snprintf semantics).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    const void* const printable = ptr;
    snprintf(outStr, strLen, "%p", printable);
}
1414 void Lock() { m_Mutex.lock(); }
1415 void Unlock() { m_Mutex.unlock(); }
1419 #define VMA_MUTEX VmaMutex 1430 #ifndef VMA_ATOMIC_UINT32 1431 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1434 #ifndef VMA_BEST_FIT 1447 #define VMA_BEST_FIT (1) 1450 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1455 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1458 #ifndef VMA_DEBUG_ALIGNMENT 1463 #define VMA_DEBUG_ALIGNMENT (1) 1466 #ifndef VMA_DEBUG_MARGIN 1471 #define VMA_DEBUG_MARGIN (0) 1474 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1479 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1482 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1487 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1490 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1491 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1495 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1496 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1500 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1501 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1505 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1511 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1512 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count / Hamming
// weight), using the classic SWAR bit-twiddling reduction: each step sums
// per-field bit counts over progressively wider fields.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);        // sums within 2-bit fields
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);  // sums within 4-bit fields
    c = ((c >> 4) + c) & 0x0F0F0F0F;                 // sums within 8-bit fields
    c = ((c >> 8) + c) & 0x00FF00FF;                 // sums within 16-bit fields
    c = ((c >> 16) + c) & 0x0000FFFF;                // final 32-bit sum
    // NOTE(review): the return statement was missing from this block as
    // extracted; restored here — falling off the end of a value-returning
    // function is undefined behavior.
    return c;
}
// Rounds `val` up to the nearest multiple of `align`. A value that is
// already a multiple of `align` is returned unchanged. `align` must be
// non-zero (division by it).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped - bumped % align;
}
// Integer division of x by y, rounded to the nearest integer rather than
// truncated (implemented by biasing the numerator with half the divisor;
// intended for non-negative operands).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / static_cast<T>(2);
    return (x + half) / y;
}
// Lomuto-style partition step for the VMA_SORT quicksort fallback.
// Partitions [beg, end) around the last element (the pivot): elements for
// which cmp(elem, pivot) holds are moved to the front, then the pivot is
// swapped into the boundary position, which is returned.
// NOTE(review): the `++insertIndex` advance and the final `return` were
// missing from this block as extracted; restored here — without them the
// partition neither grows the "less" region nor reports the pivot position.
template<
    typename Iterator,
    typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;  // pivot = last element
    Iterator insertIndex = beg;                 // boundary of the "less" region
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);  // place pivot at the boundary
    }
    return insertIndex;
}
1565 template<
typename Iterator,
typename Compare>
1566 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1570 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1571 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1572 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1576 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1578 #endif // #ifndef VMA_SORT 1587 static inline bool VmaBlocksOnSamePage(
1588 VkDeviceSize resourceAOffset,
1589 VkDeviceSize resourceASize,
1590 VkDeviceSize resourceBOffset,
1591 VkDeviceSize pageSize)
1593 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1594 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1595 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1596 VkDeviceSize resourceBStart = resourceBOffset;
1597 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1598 return resourceAEndPage == resourceBStartPage;
// Classifies what occupies a suballocation within a device memory block.
// The buffer / linear-image / optimal-image distinction drives the
// bufferImageGranularity conflict test.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // occupied, resource kind unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF  // forces 32-bit storage
};
1618 static inline bool VmaIsBufferImageGranularityConflict(
1619 VmaSuballocationType suballocType1,
1620 VmaSuballocationType suballocType2)
1622 if(suballocType1 > suballocType2)
1624 VMA_SWAP(suballocType1, suballocType2);
1627 switch(suballocType1)
1629 case VMA_SUBALLOCATION_TYPE_FREE:
1631 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1633 case VMA_SUBALLOCATION_TYPE_BUFFER:
1635 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1636 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1637 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1639 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1640 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1641 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1642 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1644 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1645 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1657 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1658 m_pMutex(useMutex ? &mutex : VMA_NULL)
1675 VMA_MUTEX* m_pMutex;
1678 #if VMA_DEBUG_GLOBAL_MUTEX 1679 static VMA_MUTEX gDebugGlobalMutex;
1680 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1682 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1686 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
1697 template <
typename IterT,
typename KeyT,
typename CmpT>
1698 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpT cmp)
1700 size_t down = 0, up = (end - beg);
1703 const size_t mid = (down + up) / 2;
1704 if(cmp(*(beg+mid), key))
1719 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1721 if((pAllocationCallbacks != VMA_NULL) &&
1722 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1724 return (*pAllocationCallbacks->pfnAllocation)(
1725 pAllocationCallbacks->pUserData,
1728 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1732 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1736 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1738 if((pAllocationCallbacks != VMA_NULL) &&
1739 (pAllocationCallbacks->pfnFree != VMA_NULL))
1741 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1745 VMA_SYSTEM_FREE(ptr);
1749 template<
typename T>
1750 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1752 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1755 template<
typename T>
1756 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1758 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1761 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1763 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1765 template<
typename T>
1766 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1769 VmaFree(pAllocationCallbacks, ptr);
1772 template<
typename T>
1773 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1777 for(
size_t i = count; i--; )
1781 VmaFree(pAllocationCallbacks, ptr);
1786 template<
typename T>
1787 class VmaStlAllocator
1790 const VkAllocationCallbacks*
const m_pCallbacks;
1791 typedef T value_type;
1793 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1794 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1796 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1797 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1799 template<
typename U>
1800 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1802 return m_pCallbacks == rhs.m_pCallbacks;
1804 template<
typename U>
1805 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1807 return m_pCallbacks != rhs.m_pCallbacks;
1810 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1813 #if VMA_USE_STL_VECTOR 1815 #define VmaVector std::vector 1817 template<
typename T,
typename allocatorT>
1818 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1820 vec.insert(vec.begin() + index, item);
// Index-based erase shim over std::vector, mirroring the interface of the
// custom VmaVector used when VMA_USE_STL_VECTOR is disabled.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
1829 #else // #if VMA_USE_STL_VECTOR 1834 template<
typename T,
typename AllocatorT>
1838 typedef T value_type;
1840 VmaVector(
const AllocatorT& allocator) :
1841 m_Allocator(allocator),
1848 VmaVector(
size_t count,
const AllocatorT& allocator) :
1849 m_Allocator(allocator),
1850 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1856 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1857 m_Allocator(src.m_Allocator),
1858 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1859 m_Count(src.m_Count),
1860 m_Capacity(src.m_Count)
1864 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1870 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1873 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1877 resize(rhs.m_Count);
1880 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1886 bool empty()
const {
return m_Count == 0; }
1887 size_t size()
const {
return m_Count; }
1888 T* data() {
return m_pArray; }
1889 const T* data()
const {
return m_pArray; }
1891 T& operator[](
size_t index)
1893 VMA_HEAVY_ASSERT(index < m_Count);
1894 return m_pArray[index];
1896 const T& operator[](
size_t index)
const 1898 VMA_HEAVY_ASSERT(index < m_Count);
1899 return m_pArray[index];
1904 VMA_HEAVY_ASSERT(m_Count > 0);
1907 const T& front()
const 1909 VMA_HEAVY_ASSERT(m_Count > 0);
1914 VMA_HEAVY_ASSERT(m_Count > 0);
1915 return m_pArray[m_Count - 1];
1917 const T& back()
const 1919 VMA_HEAVY_ASSERT(m_Count > 0);
1920 return m_pArray[m_Count - 1];
1923 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1925 newCapacity = VMA_MAX(newCapacity, m_Count);
1927 if((newCapacity < m_Capacity) && !freeMemory)
1929 newCapacity = m_Capacity;
1932 if(newCapacity != m_Capacity)
1934 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1937 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1939 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1940 m_Capacity = newCapacity;
1941 m_pArray = newArray;
1945 void resize(
size_t newCount,
bool freeMemory =
false)
1947 size_t newCapacity = m_Capacity;
1948 if(newCount > m_Capacity)
1950 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1954 newCapacity = newCount;
1957 if(newCapacity != m_Capacity)
1959 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1960 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1961 if(elementsToCopy != 0)
1963 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1965 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1966 m_Capacity = newCapacity;
1967 m_pArray = newArray;
1973 void clear(
bool freeMemory =
false)
1975 resize(0, freeMemory);
1978 void insert(
size_t index,
const T& src)
1980 VMA_HEAVY_ASSERT(index <= m_Count);
1981 const size_t oldCount = size();
1982 resize(oldCount + 1);
1983 if(index < oldCount)
1985 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1987 m_pArray[index] = src;
1990 void remove(
size_t index)
1992 VMA_HEAVY_ASSERT(index < m_Count);
1993 const size_t oldCount = size();
1994 if(index < oldCount - 1)
1996 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1998 resize(oldCount - 1);
2001 void push_back(
const T& src)
2003 const size_t newIndex = size();
2004 resize(newIndex + 1);
2005 m_pArray[newIndex] = src;
2010 VMA_HEAVY_ASSERT(m_Count > 0);
2014 void push_front(
const T& src)
2021 VMA_HEAVY_ASSERT(m_Count > 0);
2025 typedef T* iterator;
2027 iterator begin() {
return m_pArray; }
2028 iterator end() {
return m_pArray + m_Count; }
2031 AllocatorT m_Allocator;
2037 template<
typename T,
typename allocatorT>
2038 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2040 vec.insert(index, item);
2043 template<
typename T,
typename allocatorT>
2044 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2049 #endif // #if VMA_USE_STL_VECTOR 2051 template<
typename CmpLess,
typename VectorT>
2052 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2054 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2056 vector.data() + vector.size(),
2058 CmpLess()) - vector.data();
2059 VmaVectorInsert(vector, indexToInsert, value);
2060 return indexToInsert;
2063 template<
typename CmpLess,
typename VectorT>
2064 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2067 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2072 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2074 size_t indexToRemove = it - vector.begin();
2075 VmaVectorRemove(vector, indexToRemove);
2081 template<
typename CmpLess,
typename VectorT>
2082 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2085 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2087 vector.data() + vector.size(),
2090 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2092 return it - vector.begin();
2096 return vector.size();
2108 template<
typename T>
2109 class VmaPoolAllocator
2112 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2113 ~VmaPoolAllocator();
2121 uint32_t NextFreeIndex;
2128 uint32_t FirstFreeIndex;
2131 const VkAllocationCallbacks* m_pAllocationCallbacks;
2132 size_t m_ItemsPerBlock;
2133 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2135 ItemBlock& CreateNewBlock();
2138 template<
typename T>
2139 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2140 m_pAllocationCallbacks(pAllocationCallbacks),
2141 m_ItemsPerBlock(itemsPerBlock),
2142 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2144 VMA_ASSERT(itemsPerBlock > 0);
2147 template<
typename T>
2148 VmaPoolAllocator<T>::~VmaPoolAllocator()
2153 template<
typename T>
2154 void VmaPoolAllocator<T>::Clear()
2156 for(
size_t i = m_ItemBlocks.size(); i--; )
2157 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2158 m_ItemBlocks.clear();
2161 template<
typename T>
2162 T* VmaPoolAllocator<T>::Alloc()
2164 for(
size_t i = m_ItemBlocks.size(); i--; )
2166 ItemBlock& block = m_ItemBlocks[i];
2168 if(block.FirstFreeIndex != UINT32_MAX)
2170 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2171 block.FirstFreeIndex = pItem->NextFreeIndex;
2172 return &pItem->Value;
2177 ItemBlock& newBlock = CreateNewBlock();
2178 Item*
const pItem = &newBlock.pItems[0];
2179 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2180 return &pItem->Value;
2183 template<
typename T>
2184 void VmaPoolAllocator<T>::Free(T* ptr)
2187 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2189 ItemBlock& block = m_ItemBlocks[i];
2193 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2196 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2198 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2199 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2200 block.FirstFreeIndex = index;
2204 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2207 template<
typename T>
2208 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2210 ItemBlock newBlock = {
2211 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2213 m_ItemBlocks.push_back(newBlock);
2216 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2217 newBlock.pItems[i].NextFreeIndex = i + 1;
2218 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2219 return m_ItemBlocks.back();
2225 #if VMA_USE_STL_LIST 2227 #define VmaList std::list 2229 #else // #if VMA_USE_STL_LIST 2231 template<
typename T>
2240 template<
typename T>
2244 typedef VmaListItem<T> ItemType;
2246 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2250 size_t GetCount()
const {
return m_Count; }
2251 bool IsEmpty()
const {
return m_Count == 0; }
2253 ItemType* Front() {
return m_pFront; }
2254 const ItemType* Front()
const {
return m_pFront; }
2255 ItemType* Back() {
return m_pBack; }
2256 const ItemType* Back()
const {
return m_pBack; }
2258 ItemType* PushBack();
2259 ItemType* PushFront();
2260 ItemType* PushBack(
const T& value);
2261 ItemType* PushFront(
const T& value);
2266 ItemType* InsertBefore(ItemType* pItem);
2268 ItemType* InsertAfter(ItemType* pItem);
2270 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2271 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2273 void Remove(ItemType* pItem);
2276 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2277 VmaPoolAllocator<ItemType> m_ItemAllocator;
2283 VmaRawList(
const VmaRawList<T>& src);
2284 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2287 template<
typename T>
2288 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2289 m_pAllocationCallbacks(pAllocationCallbacks),
2290 m_ItemAllocator(pAllocationCallbacks, 128),
2297 template<
typename T>
2298 VmaRawList<T>::~VmaRawList()
2304 template<
typename T>
2305 void VmaRawList<T>::Clear()
2307 if(IsEmpty() ==
false)
2309 ItemType* pItem = m_pBack;
2310 while(pItem != VMA_NULL)
2312 ItemType*
const pPrevItem = pItem->pPrev;
2313 m_ItemAllocator.Free(pItem);
2316 m_pFront = VMA_NULL;
2322 template<
typename T>
2323 VmaListItem<T>* VmaRawList<T>::PushBack()
2325 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2326 pNewItem->pNext = VMA_NULL;
2329 pNewItem->pPrev = VMA_NULL;
2330 m_pFront = pNewItem;
2336 pNewItem->pPrev = m_pBack;
2337 m_pBack->pNext = pNewItem;
2344 template<
typename T>
2345 VmaListItem<T>* VmaRawList<T>::PushFront()
2347 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2348 pNewItem->pPrev = VMA_NULL;
2351 pNewItem->pNext = VMA_NULL;
2352 m_pFront = pNewItem;
2358 pNewItem->pNext = m_pFront;
2359 m_pFront->pPrev = pNewItem;
2360 m_pFront = pNewItem;
2366 template<
typename T>
2367 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2369 ItemType*
const pNewItem = PushBack();
2370 pNewItem->Value = value;
2374 template<
typename T>
2375 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2377 ItemType*
const pNewItem = PushFront();
2378 pNewItem->Value = value;
2382 template<
typename T>
2383 void VmaRawList<T>::PopBack()
2385 VMA_HEAVY_ASSERT(m_Count > 0);
2386 ItemType*
const pBackItem = m_pBack;
2387 ItemType*
const pPrevItem = pBackItem->pPrev;
2388 if(pPrevItem != VMA_NULL)
2390 pPrevItem->pNext = VMA_NULL;
2392 m_pBack = pPrevItem;
2393 m_ItemAllocator.Free(pBackItem);
2397 template<
typename T>
2398 void VmaRawList<T>::PopFront()
2400 VMA_HEAVY_ASSERT(m_Count > 0);
2401 ItemType*
const pFrontItem = m_pFront;
2402 ItemType*
const pNextItem = pFrontItem->pNext;
2403 if(pNextItem != VMA_NULL)
2405 pNextItem->pPrev = VMA_NULL;
2407 m_pFront = pNextItem;
2408 m_ItemAllocator.Free(pFrontItem);
2412 template<
typename T>
2413 void VmaRawList<T>::Remove(ItemType* pItem)
2415 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2416 VMA_HEAVY_ASSERT(m_Count > 0);
2418 if(pItem->pPrev != VMA_NULL)
2420 pItem->pPrev->pNext = pItem->pNext;
2424 VMA_HEAVY_ASSERT(m_pFront == pItem);
2425 m_pFront = pItem->pNext;
2428 if(pItem->pNext != VMA_NULL)
2430 pItem->pNext->pPrev = pItem->pPrev;
2434 VMA_HEAVY_ASSERT(m_pBack == pItem);
2435 m_pBack = pItem->pPrev;
2438 m_ItemAllocator.Free(pItem);
2442 template<
typename T>
2443 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2445 if(pItem != VMA_NULL)
2447 ItemType*
const prevItem = pItem->pPrev;
2448 ItemType*
const newItem = m_ItemAllocator.Alloc();
2449 newItem->pPrev = prevItem;
2450 newItem->pNext = pItem;
2451 pItem->pPrev = newItem;
2452 if(prevItem != VMA_NULL)
2454 prevItem->pNext = newItem;
2458 VMA_HEAVY_ASSERT(m_pFront == pItem);
2468 template<
typename T>
2469 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2471 if(pItem != VMA_NULL)
2473 ItemType*
const nextItem = pItem->pNext;
2474 ItemType*
const newItem = m_ItemAllocator.Alloc();
2475 newItem->pNext = nextItem;
2476 newItem->pPrev = pItem;
2477 pItem->pNext = newItem;
2478 if(nextItem != VMA_NULL)
2480 nextItem->pPrev = newItem;
2484 VMA_HEAVY_ASSERT(m_pBack == pItem);
2494 template<
typename T>
2495 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2497 ItemType*
const newItem = InsertBefore(pItem);
2498 newItem->Value = value;
2502 template<
typename T>
2503 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2505 ItemType*
const newItem = InsertAfter(pItem);
2506 newItem->Value = value;
2510 template<
typename T,
typename AllocatorT>
2523 T& operator*()
const 2525 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2526 return m_pItem->Value;
2528 T* operator->()
const 2530 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2531 return &m_pItem->Value;
2534 iterator& operator++()
2536 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2537 m_pItem = m_pItem->pNext;
2540 iterator& operator--()
2542 if(m_pItem != VMA_NULL)
2544 m_pItem = m_pItem->pPrev;
2548 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2549 m_pItem = m_pList->Back();
2554 iterator operator++(
int)
2556 iterator result = *
this;
2560 iterator operator--(
int)
2562 iterator result = *
this;
2567 bool operator==(
const iterator& rhs)
const 2569 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2570 return m_pItem == rhs.m_pItem;
2572 bool operator!=(
const iterator& rhs)
const 2574 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2575 return m_pItem != rhs.m_pItem;
2579 VmaRawList<T>* m_pList;
2580 VmaListItem<T>* m_pItem;
2582 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2588 friend class VmaList<T, AllocatorT>;
2591 class const_iterator
2600 const_iterator(
const iterator& src) :
2601 m_pList(src.m_pList),
2602 m_pItem(src.m_pItem)
2606 const T& operator*()
const 2608 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2609 return m_pItem->Value;
2611 const T* operator->()
const 2613 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2614 return &m_pItem->Value;
2617 const_iterator& operator++()
2619 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2620 m_pItem = m_pItem->pNext;
2623 const_iterator& operator--()
2625 if(m_pItem != VMA_NULL)
2627 m_pItem = m_pItem->pPrev;
2631 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2632 m_pItem = m_pList->Back();
2637 const_iterator operator++(
int)
2639 const_iterator result = *
this;
2643 const_iterator operator--(
int)
2645 const_iterator result = *
this;
2650 bool operator==(
const const_iterator& rhs)
const 2652 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2653 return m_pItem == rhs.m_pItem;
2655 bool operator!=(
const const_iterator& rhs)
const 2657 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2658 return m_pItem != rhs.m_pItem;
2662 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2668 const VmaRawList<T>* m_pList;
2669 const VmaListItem<T>* m_pItem;
2671 friend class VmaList<T, AllocatorT>;
2674 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2676 bool empty()
const {
return m_RawList.IsEmpty(); }
2677 size_t size()
const {
return m_RawList.GetCount(); }
2679 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2680 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2682 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2683 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2685 void clear() { m_RawList.Clear(); }
2686 void push_back(
const T& value) { m_RawList.PushBack(value); }
2687 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2688 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2691 VmaRawList<T> m_RawList;
2694 #endif // #if VMA_USE_STL_LIST 2702 #if VMA_USE_STL_UNORDERED_MAP 2704 #define VmaPair std::pair 2706 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2707 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2709 #else // #if VMA_USE_STL_UNORDERED_MAP 2711 template<
typename T1,
typename T2>
2717 VmaPair() : first(), second() { }
2718 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2724 template<
typename KeyT,
typename ValueT>
2728 typedef VmaPair<KeyT, ValueT> PairType;
2729 typedef PairType* iterator;
2731 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2733 iterator begin() {
return m_Vector.begin(); }
2734 iterator end() {
return m_Vector.end(); }
2736 void insert(
const PairType& pair);
2737 iterator find(
const KeyT& key);
2738 void erase(iterator it);
2741 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2744 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2746 template<
typename FirstT,
typename SecondT>
2747 struct VmaPairFirstLess
2749 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2751 return lhs.first < rhs.first;
2753 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2755 return lhs.first < rhsFirst;
2759 template<
typename KeyT,
typename ValueT>
2760 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2762 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2764 m_Vector.data() + m_Vector.size(),
2766 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2767 VmaVectorInsert(m_Vector, indexToInsert, pair);
2770 template<
typename KeyT,
typename ValueT>
2771 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2773 PairType* it = VmaBinaryFindFirstNotLess(
2775 m_Vector.data() + m_Vector.size(),
2777 VmaPairFirstLess<KeyT, ValueT>());
2778 if((it != m_Vector.end()) && (it->first == key))
2784 return m_Vector.end();
2788 template<
typename KeyT,
typename ValueT>
2789 void VmaMap<KeyT, ValueT>::erase(iterator it)
2791 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2794 #endif // #if VMA_USE_STL_UNORDERED_MAP 2800 class VmaDeviceMemoryBlock;
// Index selecting between the two parallel block-vector families kept per
// memory type (naming suggests the MAPPED family holds persistently mapped
// allocations — see m_PersistentMap usage elsewhere in this file).
// COUNT is the array-dimension sentinel.
enum VMA_BLOCK_VECTOR_TYPE
{
    VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
    VMA_BLOCK_VECTOR_TYPE_MAPPED,
    VMA_BLOCK_VECTOR_TYPE_COUNT
};
2812 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2813 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2816 struct VmaAllocation_T
2819 enum ALLOCATION_TYPE
2821 ALLOCATION_TYPE_NONE,
2822 ALLOCATION_TYPE_BLOCK,
2823 ALLOCATION_TYPE_OWN,
2826 VmaAllocation_T(uint32_t currentFrameIndex) :
2829 m_pUserData(VMA_NULL),
2830 m_Type(ALLOCATION_TYPE_NONE),
2831 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2832 m_LastUseFrameIndex(currentFrameIndex)
2836 void InitBlockAllocation(
2838 VmaDeviceMemoryBlock* block,
2839 VkDeviceSize offset,
2840 VkDeviceSize alignment,
2842 VmaSuballocationType suballocationType,
2846 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2847 VMA_ASSERT(block != VMA_NULL);
2848 m_Type = ALLOCATION_TYPE_BLOCK;
2849 m_Alignment = alignment;
2851 m_pUserData = pUserData;
2852 m_SuballocationType = suballocationType;
2853 m_BlockAllocation.m_hPool = hPool;
2854 m_BlockAllocation.m_Block = block;
2855 m_BlockAllocation.m_Offset = offset;
2856 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2861 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2862 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2863 m_Type = ALLOCATION_TYPE_BLOCK;
2864 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2865 m_BlockAllocation.m_Block = VMA_NULL;
2866 m_BlockAllocation.m_Offset = 0;
2867 m_BlockAllocation.m_CanBecomeLost =
true;
2870 void ChangeBlockAllocation(
2871 VmaDeviceMemoryBlock* block,
2872 VkDeviceSize offset)
2874 VMA_ASSERT(block != VMA_NULL);
2875 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2876 m_BlockAllocation.m_Block = block;
2877 m_BlockAllocation.m_Offset = offset;
2880 void InitOwnAllocation(
2881 uint32_t memoryTypeIndex,
2882 VkDeviceMemory hMemory,
2883 VmaSuballocationType suballocationType,
2889 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2890 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2891 m_Type = ALLOCATION_TYPE_OWN;
2894 m_pUserData = pUserData;
2895 m_SuballocationType = suballocationType;
2896 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2897 m_OwnAllocation.m_hMemory = hMemory;
2898 m_OwnAllocation.m_PersistentMap = persistentMap;
2899 m_OwnAllocation.m_pMappedData = pMappedData;
// Trivial inline getters/setters for VmaAllocation_T state.
2902 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2903 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2904 VkDeviceSize GetSize()
const {
return m_Size; }
2905 void* GetUserData()
const {
return m_pUserData; }
2906 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2907 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
// Only meaningful for block allocations — asserts on the type tag.
2909 VmaDeviceMemoryBlock* GetBlock()
const 2911 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2912 return m_BlockAllocation.m_Block;
// Accessors defined out-of-class further below (dispatch on m_Type).
2914 VkDeviceSize GetOffset()
const;
2915 VkDeviceMemory GetMemory()
const;
2916 uint32_t GetMemoryTypeIndex()
const;
2917 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2918 void* GetMappedData()
const;
2919 bool CanBecomeLost()
const;
2920 VmaPool GetPool()
const;
2922 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2923 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
// Lost-allocation bookkeeping: m_LastUseFrameIndex is atomic so that
// touch/make-lost can race safely (weak CAS — caller is expected to loop).
2925 uint32_t GetLastUseFrameIndex()
const 2927 return m_LastUseFrameIndex.load();
2929 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2931 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
2941 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Stray assert from a method whose surrounding lines were lost to extraction.
2945 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
// Data members. m_BlockAllocation / m_OwnAllocation are alternatives selected
// by m_Type (presumably members of an anonymous union in the original —
// TODO confirm; the surrounding lines were lost to extraction).
2957 VkDeviceSize m_Alignment;
2958 VkDeviceSize m_Size;
2960 ALLOCATION_TYPE m_Type;
2961 VmaSuballocationType m_SuballocationType;
2962 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State for an allocation carved out of a larger VkDeviceMemory block.
2965 struct BlockAllocation
2968 VmaDeviceMemoryBlock* m_Block;
2969 VkDeviceSize m_Offset;
2970 bool m_CanBecomeLost;
// State for an allocation with its own dedicated VkDeviceMemory.
2974 struct OwnAllocation
2976 uint32_t m_MemoryTypeIndex;
2977 VkDeviceMemory m_hMemory;
2978 bool m_PersistentMap;
2979 void* m_pMappedData;
2985 BlockAllocation m_BlockAllocation;
2987 OwnAllocation m_OwnAllocation;
// One region (used or free) inside a block; free entries have a null
// hAllocation and type VMA_SUBALLOCATION_TYPE_FREE. (The `size` member line
// was lost to extraction — it is referenced throughout the code below.)
2995 struct VmaSuballocation
2997 VkDeviceSize offset;
2999 VmaAllocation hAllocation;
3000 VmaSuballocationType type;
3003 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost penalty (in bytes) charged per allocation that would have to be made
// lost to satisfy a request — biases CalcCost() against losing allocations.
3006 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, produced by
// CreateAllocationRequest and consumed by Alloc.
3021 struct VmaAllocationRequest
3023 VkDeviceSize offset;
3024 VkDeviceSize sumFreeSize;
3025 VkDeviceSize sumItemSize;
3026 VmaSuballocationList::iterator item;
3027 size_t itemsToMakeLostCount;
// Lower cost is better: bytes of existing allocations sacrificed, plus a
// fixed penalty per allocation made lost.
3029 VkDeviceSize CalcCost()
const 3031 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for one VkDeviceMemory block: a sorted list of suballocations
// (used + free) plus an index of free ranges sorted by size (best-fit search).
3039 class VmaBlockMetadata
3042 VmaBlockMetadata(VmaAllocator hAllocator);
3043 ~VmaBlockMetadata();
3044 void Init(VkDeviceSize size);
// Full consistency check of both data structures; expensive, used in
// VMA_HEAVY_ASSERT.
3047 bool Validate()
const;
3048 VkDeviceSize GetSize()
const {
return m_Size; }
3049 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3050 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3051 VkDeviceSize GetUnusedRangeSizeMax()
const;
3053 bool IsEmpty()
const;
3055 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3058 #if VMA_STATS_STRING_ENABLED 3059 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Trivial request covering the whole (empty) block.
3063 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Tries to find a place for an allocation; may plan to make other
// allocations lost (canMakeOtherLost) to create room.
3068 bool CreateAllocationRequest(
3069 uint32_t currentFrameIndex,
3070 uint32_t frameInUseCount,
3071 VkDeviceSize bufferImageGranularity,
3072 VkDeviceSize allocSize,
3073 VkDeviceSize allocAlignment,
3074 VmaSuballocationType allocType,
3075 bool canMakeOtherLost,
3076 VmaAllocationRequest* pAllocationRequest);
3078 bool MakeRequestedAllocationsLost(
3079 uint32_t currentFrameIndex,
3080 uint32_t frameInUseCount,
3081 VmaAllocationRequest* pAllocationRequest);
3083 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Commits a previously created request, splitting the free range as needed.
3087 const VmaAllocationRequest& request,
3088 VmaSuballocationType type,
3089 VkDeviceSize allocSize,
3090 VmaAllocation hAllocation);
3093 void Free(
const VmaAllocation allocation);
3096 VkDeviceSize m_Size;
3097 uint32_t m_FreeCount;
3098 VkDeviceSize m_SumFreeSize;
3099 VmaSuballocationList m_Suballocations;
// Free suballocations >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER,
// sorted ascending by size for binary best-fit search.
3102 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3104 bool ValidateFreeSuballocationList()
const;
// Checks whether a request fits starting at suballocItem, honoring alignment
// and bufferImageGranularity; outputs offset and make-lost accounting.
3108 bool CheckAllocation(
3109 uint32_t currentFrameIndex,
3110 uint32_t frameInUseCount,
3111 VkDeviceSize bufferImageGranularity,
3112 VkDeviceSize allocSize,
3113 VkDeviceSize allocAlignment,
3114 VmaSuballocationType allocType,
3115 VmaSuballocationList::const_iterator suballocItem,
3116 bool canMakeOtherLost,
3117 VkDeviceSize* pOffset,
3118 size_t* itemsToMakeLostCount,
3119 VkDeviceSize* pSumFreeSize,
3120 VkDeviceSize* pSumItemSize)
const;
3122 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3126 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3129 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3132 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// One VkDeviceMemory allocation managed by VMA, with its metadata.
// Destruction requires Destroy() to have freed the memory first.
3141 class VmaDeviceMemoryBlock
3144 uint32_t m_MemoryTypeIndex;
3145 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3146 VkDeviceMemory m_hMemory;
3147 bool m_PersistentMap;
3148 void* m_pMappedData;
3149 VmaBlockMetadata m_Metadata;
3151 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3153 ~VmaDeviceMemoryBlock()
3155 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init(...) — the first parameter lines were lost to extraction.
3160 uint32_t newMemoryTypeIndex,
3161 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3162 VkDeviceMemory newMemory,
3163 VkDeviceSize newSize,
3167 void Destroy(VmaAllocator allocator);
3170 bool Validate()
const;
// Generic less-than on raw pointer values, for sorted pointer vectors.
3173 struct VmaPointerLess
3175 bool operator()(
const void* lhs,
const void* rhs)
const 3181 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock of one memory type / block-vector type,
// kept incrementally sorted by available size (ascending).
3189 struct VmaBlockVector
3192 VmaAllocator hAllocator,
3193 uint32_t memoryTypeIndex,
3194 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3195 VkDeviceSize preferredBlockSize,
3196 size_t minBlockCount,
3197 size_t maxBlockCount,
3198 VkDeviceSize bufferImageGranularity,
3199 uint32_t frameInUseCount,
3203 VkResult CreateMinBlocks();
3205 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3206 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3207 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3208 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3209 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3213 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate(...) — leading lines lost to extraction.
3216 VmaPool hCurrentPool,
3217 uint32_t currentFrameIndex,
3218 const VkMemoryRequirements& vkMemReq,
3220 VmaSuballocationType suballocType,
3221 VmaAllocation* pAllocation);
3224 VmaAllocation hAllocation);
3229 #if VMA_STATS_STRING_ENABLED 3230 void PrintDetailedMap(
class VmaJsonWriter& json);
3233 void UnmapPersistentlyMappedMemory();
3234 VkResult MapPersistentlyMappedMemory();
3236 void MakePoolAllocationsLost(
3237 uint32_t currentFrameIndex,
3238 size_t* pLostAllocationCount);
3240 VmaDefragmentator* EnsureDefragmentator(
3241 VmaAllocator hAllocator,
3242 uint32_t currentFrameIndex);
3244 VkResult Defragment(
3246 VkDeviceSize& maxBytesToMove,
3247 uint32_t& maxAllocationsToMove);
3249 void DestroyDefragmentator();
3252 friend class VmaDefragmentator;
3254 const VmaAllocator m_hAllocator;
3255 const uint32_t m_MemoryTypeIndex;
3256 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3257 const VkDeviceSize m_PreferredBlockSize;
3258 const size_t m_MinBlockCount;
3259 const size_t m_MaxBlockCount;
3260 const VkDeviceSize m_BufferImageGranularity;
3261 const uint32_t m_FrameInUseCount;
3262 const bool m_IsCustomPool;
3265 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// There can be at most one totally empty block at any time, to avoid
// repeated free/alloc of VkDeviceMemory.
3269 bool m_HasEmptyBlock;
3270 VmaDefragmentator* m_pDefragmentator;
3273 void Remove(VmaDeviceMemoryBlock* pBlock);
// Performs single step of sorting m_Blocks — moves one block up if needed.
3277 void IncrementallySortBlocks();
3279 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Fragment of a pool class whose header line was lost to extraction
// (presumably VmaPool_T — TODO confirm against upstream VMA source).
3285 VmaBlockVector m_BlockVector;
3289 VmaAllocator hAllocator,
3293 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact memory;
// accumulates bytes/allocations moved against caller-supplied limits.
3295 #if VMA_STATS_STRING_ENABLED 3300 class VmaDefragmentator
3302 const VmaAllocator m_hAllocator;
3303 VmaBlockVector*
const m_pBlockVector;
3304 uint32_t m_CurrentFrameIndex;
3305 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3306 VkDeviceSize m_BytesMoved;
3307 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set if the
// allocation actually moved.
3309 struct AllocationInfo
3311 VmaAllocation m_hAllocation;
3312 VkBool32* m_pChanged;
3315 m_hAllocation(VK_NULL_HANDLE),
3316 m_pChanged(VMA_NULL)
// Sort predicate: larger allocations first.
3321 struct AllocationInfoSizeGreater
3323 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3325 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3330 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation round.
3334 VmaDeviceMemoryBlock* m_pBlock;
3335 bool m_HasNonMovableAllocations;
3336 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3338 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3340 m_HasNonMovableAllocations(true),
3341 m_Allocations(pAllocationCallbacks),
3342 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not all of its allocations were
// registered for defragmentation.
3346 void CalcHasNonMovableAllocations()
3348 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3349 const size_t defragmentAllocCount = m_Allocations.size();
3350 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3353 void SortAllocationsBySizeDescecnding()
3355 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3358 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3359 void Unmap(VmaAllocator hAllocator);
3363 void* m_pMappedDataForDefragmentation;
// Compare BlockInfo against a raw block pointer (for binary search).
3366 struct BlockPointerLess
3368 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3370 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3372 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3374 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ordering for choosing destination blocks: movable-only blocks first,
// then by free space.
3380 struct BlockInfoCompareMoveDestination
3382 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3384 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3388 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3392 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3400 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3401 BlockInfoVector m_Blocks;
3403 VkResult DefragmentRound(
3404 VkDeviceSize maxBytesToMove,
3405 uint32_t maxAllocationsToMove);
3407 static bool MoveMakesSense(
3408 size_t dstBlockIndex, VkDeviceSize dstOffset,
3409 size_t srcBlockIndex, VkDeviceSize srcOffset);
3413 VmaAllocator hAllocator,
3414 VmaBlockVector* pBlockVector,
3415 uint32_t currentFrameIndex);
3417 ~VmaDefragmentator();
3419 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3420 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3422 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3424 VkResult Defragment(
3425 VkDeviceSize maxBytesToMove,
3426 uint32_t maxAllocationsToMove);
// Main allocator object: per-memory-type block vectors, own (dedicated)
// allocations, custom pools, and wrappers around vkAllocateMemory/vkFreeMemory.
3430 struct VmaAllocator_T
3434 bool m_AllocationCallbacksSpecified;
3435 VkAllocationCallbacks m_AllocationCallbacks;
// Counter of unmap operations; nonzero means persistently mapped memory is
// currently unmapped (presumably — TODO confirm against upstream docs).
3439 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
3442 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3443 VMA_MUTEX m_HeapSizeLimitMutex;
3445 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3446 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default (non-pool) block vectors, per memory type and mapped/unmapped kind.
3449 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3452 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3453 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3454 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
3459 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3461 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3465 return m_VulkanFunctions;
// Effective granularity: device limit, floored by debug minimum.
3468 VkDeviceSize GetBufferImageGranularity()
const 3471 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3472 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3475 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3476 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3478 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3480 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3481 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Main entry points implementing the public vmaAllocateMemory*/vmaFreeMemory.
3485 VkResult AllocateMemory(
3486 const VkMemoryRequirements& vkMemReq,
3488 VmaSuballocationType suballocType,
3489 VmaAllocation* pAllocation);
3492 void FreeMemory(
const VmaAllocation allocation);
3494 void CalculateStats(
VmaStats* pStats);
3496 #if VMA_STATS_STRING_ENABLED 3497 void PrintDetailedMap(
class VmaJsonWriter& json);
3500 void UnmapPersistentlyMappedMemory();
3501 VkResult MapPersistentlyMappedMemory();
3503 VkResult Defragment(
3504 VmaAllocation* pAllocations,
3505 size_t allocationCount,
3506 VkBool32* pAllocationsChanged,
3510 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3513 void DestroyPool(VmaPool pool);
3514 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3516 void SetCurrentFrameIndex(uint32_t frameIndex);
3518 void MakePoolAllocationsLost(
3520 size_t* pLostAllocationCount);
3522 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also track heap
// size limits and statistics.
3524 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3525 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3528 VkDeviceSize m_PreferredLargeHeapBlockSize;
3529 VkDeviceSize m_PreferredSmallHeapBlockSize;
3531 VkPhysicalDevice m_PhysicalDevice;
3532 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3534 VMA_MUTEX m_PoolsMutex;
3536 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3542 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3544 VkResult AllocateMemoryOfType(
3545 const VkMemoryRequirements& vkMemReq,
3547 uint32_t memTypeIndex,
3548 VmaSuballocationType suballocType,
3549 VmaAllocation* pAllocation);
// Dedicated (non-block) allocation path.
3552 VkResult AllocateOwnMemory(
3554 VmaSuballocationType suballocType,
3555 uint32_t memTypeIndex,
3558 VmaAllocation* pAllocation);
3561 void FreeOwnMemory(VmaAllocation allocation);
// Convenience overloads routing raw/typed allocations through the allocator's
// VkAllocationCallbacks.
3567 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3569 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3572 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3574 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3577 template<
typename T>
3578 static T* VmaAllocate(VmaAllocator hAllocator)
3580 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3583 template<
typename T>
3584 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3586 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_delete / vma_delete_array: destructor-call lines were lost to
// extraction; presumably they destroy the object(s) before freeing —
// TODO confirm against upstream source.
3589 template<
typename T>
3590 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3595 VmaFree(hAllocator, ptr);
3599 template<
typename T>
3600 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3604 for(
size_t i = count; i--; )
3606 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used to build the JSON stats string
// (avoids std::string so the allocator's callbacks are used).
3613 #if VMA_STATS_STRING_ENABLED 3615 class VmaStringBuilder
3618 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3619 size_t GetLength()
const {
return m_Data.size(); }
// Note: buffer is NOT null-terminated; use GetLength().
3620 const char* GetData()
const {
return m_Data.data(); }
3622 void Add(
char ch) { m_Data.push_back(ch); }
3623 void Add(
const char* pStr);
3624 void AddNewLine() { Add(
'\n'); }
3625 void AddNumber(uint32_t num);
3626 void AddNumber(uint64_t num);
3627 void AddPointer(
const void* ptr);
3630 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string (without its terminator) by bulk resize + memcpy.
3633 void VmaStringBuilder::Add(
const char* pStr)
3635 const size_t strLen = strlen(pStr);
3638 const size_t oldCount = m_Data.size();
3639 m_Data.resize(oldCount + strLen);
3640 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting into a stack buffer, then appended. The buffer
// declarations and Add(buf) lines were lost to extraction.
3644 void VmaStringBuilder::AddNumber(uint32_t num)
3647 VmaUint32ToStr(buf,
sizeof(buf), num);
3651 void VmaStringBuilder::AddNumber(uint64_t num)
3654 VmaUint64ToStr(buf,
sizeof(buf), num);
3658 void VmaStringBuilder::AddPointer(
const void* ptr)
3661 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Objects are written as
// alternating string-key / value pairs; a stack tracks nesting and controls
// comma/indent emission. (The class-header line was lost to extraction.)
3665 #endif // #if VMA_STATS_STRING_ENABLED 3670 #if VMA_STATS_STRING_ENABLED 3675 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3678 void BeginObject(
bool singleLine =
false);
3681 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
3684 void WriteString(
const char* pStr);
3685 void BeginString(
const char* pStr = VMA_NULL);
3686 void ContinueString(
const char* pStr);
3687 void ContinueString(uint32_t n);
3688 void ContinueString(uint64_t n);
3689 void EndString(
const char* pStr = VMA_NULL);
3691 void WriteNumber(uint32_t n);
3692 void WriteNumber(uint64_t n);
3693 void WriteBool(
bool b);
3697 static const char*
const INDENT;
3699 enum COLLECTION_TYPE
3701 COLLECTION_TYPE_OBJECT,
3702 COLLECTION_TYPE_ARRAY,
// Per-nesting-level state: collection kind, number of values written so far
// (objects count keys and values separately), and single-line formatting.
3706 COLLECTION_TYPE type;
3707 uint32_t valueCount;
3708 bool singleLineMode;
3711 VmaStringBuilder& m_SB;
3712 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3713 bool m_InsideString;
3715 void BeginValue(
bool isString);
3716 void WriteIndent(
bool oneLess =
false);
3719 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: binds the output string builder (initializer line for m_SB
// was lost to extraction) and starts outside any string.
3721 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3723 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3724 m_InsideString(false)
3728 VmaJsonWriter::~VmaJsonWriter()
3730 VMA_ASSERT(!m_InsideString);
3731 VMA_ASSERT(m_Stack.empty());
// Begin/End for objects and arrays: push/pop a StackItem; the emitted '{'/'['
// and '}'/']' lines were lost to extraction.
3734 void VmaJsonWriter::BeginObject(
bool singleLine)
3736 VMA_ASSERT(!m_InsideString);
3742 item.type = COLLECTION_TYPE_OBJECT;
3743 item.valueCount = 0;
3744 item.singleLineMode = singleLine;
3745 m_Stack.push_back(item);
3748 void VmaJsonWriter::EndObject()
3750 VMA_ASSERT(!m_InsideString);
// An object must hold complete key/value pairs when closed.
3755 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3759 void VmaJsonWriter::BeginArray(
bool singleLine)
3761 VMA_ASSERT(!m_InsideString);
3767 item.type = COLLECTION_TYPE_ARRAY;
3768 item.valueCount = 0;
3769 item.singleLineMode = singleLine;
3770 m_Stack.push_back(item);
3773 void VmaJsonWriter::EndArray()
3775 VMA_ASSERT(!m_InsideString);
3780 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// String emission. ContinueString escapes characters for JSON; the escape
// switch body was lost to extraction except the unsupported-character assert.
3784 void VmaJsonWriter::WriteString(
const char* pStr)
3790 void VmaJsonWriter::BeginString(
const char* pStr)
3792 VMA_ASSERT(!m_InsideString);
3796 m_InsideString =
true;
3797 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3799 ContinueString(pStr);
3803 void VmaJsonWriter::ContinueString(
const char* pStr)
3805 VMA_ASSERT(m_InsideString);
3807 const size_t strLen = strlen(pStr);
3808 for(
size_t i = 0; i < strLen; ++i)
3835 VMA_ASSERT(0 &&
"Character not currently supported.");
3841 void VmaJsonWriter::ContinueString(uint32_t n)
3843 VMA_ASSERT(m_InsideString);
3847 void VmaJsonWriter::ContinueString(uint64_t n)
3849 VMA_ASSERT(m_InsideString);
3853 void VmaJsonWriter::EndString(
const char* pStr)
3855 VMA_ASSERT(m_InsideString);
3856 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3858 ContinueString(pStr);
3861 m_InsideString =
false;
// Scalar value writers (number/bool/null). The BeginValue + m_SB emission
// lines were lost to extraction; asserts remain.
3864 void VmaJsonWriter::WriteNumber(uint32_t n)
3866 VMA_ASSERT(!m_InsideString);
3871 void VmaJsonWriter::WriteNumber(uint64_t n)
3873 VMA_ASSERT(!m_InsideString);
3878 void VmaJsonWriter::WriteBool(
bool b)
3880 VMA_ASSERT(!m_InsideString);
3882 m_SB.Add(b ?
"true" :
"false");
3885 void VmaJsonWriter::WriteNull()
3887 VMA_ASSERT(!m_InsideString);
// BeginValue: emits separators before a value based on position within the
// current collection. In objects, even valueCount means a key is expected
// (must be a string); odd means a value follows a key (':' separator).
3892 void VmaJsonWriter::BeginValue(
bool isString)
3894 if(!m_Stack.empty())
3896 StackItem& currItem = m_Stack.back();
3897 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3898 currItem.valueCount % 2 == 0)
3900 VMA_ASSERT(isString);
3903 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3904 currItem.valueCount % 2 != 0)
3908 else if(currItem.valueCount > 0)
3917 ++currItem.valueCount;
// WriteIndent: newline + one INDENT per nesting level, skipped entirely in
// single-line mode; oneLess is used when closing a collection.
3921 void VmaJsonWriter::WriteIndent(
bool oneLess)
3923 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3927 size_t count = m_Stack.size();
3928 if(count > 0 && oneLess)
3932 for(
size_t i = 0; i < count; ++i)
// Out-of-class VmaAllocation_T accessors: each dispatches on m_Type via a
// switch whose opening/default lines were lost to extraction.
3939 #endif // #if VMA_STATS_STRING_ENABLED 3943 VkDeviceSize VmaAllocation_T::GetOffset()
const 3947 case ALLOCATION_TYPE_BLOCK:
3948 return m_BlockAllocation.m_Offset;
3949 case ALLOCATION_TYPE_OWN:
3957 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3961 case ALLOCATION_TYPE_BLOCK:
3962 return m_BlockAllocation.m_Block->m_hMemory;
3963 case ALLOCATION_TYPE_OWN:
3964 return m_OwnAllocation.m_hMemory;
3967 return VK_NULL_HANDLE;
3971 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3975 case ALLOCATION_TYPE_BLOCK:
3976 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3977 case ALLOCATION_TYPE_OWN:
3978 return m_OwnAllocation.m_MemoryTypeIndex;
// Own allocations belong to the mapped vector iff persistently mapped.
3985 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3989 case ALLOCATION_TYPE_BLOCK:
3990 return m_BlockAllocation.m_Block->m_BlockVectorType;
3991 case ALLOCATION_TYPE_OWN:
3992 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3995 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// Block allocations: pointer into the block's mapping at this allocation's
// offset, or null if the block is not currently mapped.
3999 void* VmaAllocation_T::GetMappedData()
const 4003 case ALLOCATION_TYPE_BLOCK:
4004 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
4006 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
4013 case ALLOCATION_TYPE_OWN:
4014 return m_OwnAllocation.m_pMappedData;
4021 bool VmaAllocation_T::CanBecomeLost()
const 4025 case ALLOCATION_TYPE_BLOCK:
4026 return m_BlockAllocation.m_CanBecomeLost;
4027 case ALLOCATION_TYPE_OWN:
4035 VmaPool VmaAllocation_T::GetPool()
const 4037 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4038 return m_BlockAllocation.m_hPool;
// (Re)maps a persistently-mapped own allocation via vkMapMemory. The
// early-return-success path for non-persistent allocations and the
// offset/size arguments were lost to extraction.
4041 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4043 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4044 if(m_OwnAllocation.m_PersistentMap)
4046 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4047 hAllocator->m_hDevice,
4048 m_OwnAllocation.m_hMemory,
4052 &m_OwnAllocation.m_pMappedData);
4056 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4058 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4059 if(m_OwnAllocation.m_pMappedData)
4061 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4062 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4063 m_OwnAllocation.m_pMappedData = VMA_NULL;
// Attempts to mark the allocation lost via atomic CAS on the last-use frame
// index. Fails if already lost or still in use within frameInUseCount frames.
// The surrounding retry loop and return statements were lost to extraction.
4068 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4070 VMA_ASSERT(CanBecomeLost());
4076 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4079 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still potentially in use by in-flight frames — cannot be lost yet.
4084 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4090 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// JSON stats helpers: suballocation-type display names (array contents lost
// to extraction) and VmaStatInfo pretty-printing. The numeric WriteNumber
// argument lines between the key strings were lost to extraction.
4100 #if VMA_STATS_STRING_ENABLED 4103 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4112 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4116 json.WriteString(
"Blocks");
4119 json.WriteString(
"Allocations");
4122 json.WriteString(
"UnusedRanges");
4125 json.WriteString(
"UsedBytes");
4128 json.WriteString(
"UnusedBytes");
4133 json.WriteString(
"AllocationSize");
4134 json.BeginObject(
true);
4135 json.WriteString(
"Min");
4137 json.WriteString(
"Avg");
4139 json.WriteString(
"Max");
4146 json.WriteString(
"UnusedRangeSize");
4147 json.BeginObject(
true);
4148 json.WriteString(
"Min");
4150 json.WriteString(
"Avg");
4152 json.WriteString(
"Max");
// Comparator for the size-sorted free-suballocation index: iterator-vs-
// iterator and iterator-vs-size (for binary search by size).
4160 #endif // #if VMA_STATS_STRING_ENABLED 4162 struct VmaSuballocationItemSizeLess
4165 const VmaSuballocationList::iterator lhs,
4166 const VmaSuballocationList::iterator rhs)
const 4168 return lhs->size < rhs->size;
4171 const VmaSuballocationList::iterator lhs,
4172 VkDeviceSize rhsSize)
const 4174 return lhs->size < rhsSize;
// Construction + Init: starts as a single free suballocation covering the
// whole block. (Scalar-member initializers and the iterator-retreat line
// before registering the free range were lost to extraction.)
4181 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4185 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4186 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4190 VmaBlockMetadata::~VmaBlockMetadata()
4194 void VmaBlockMetadata::Init(VkDeviceSize size)
4198 m_SumFreeSize = size;
4200 VmaSuballocation suballoc = {};
4201 suballoc.offset = 0;
4202 suballoc.size = size;
4203 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4204 suballoc.hAllocation = VK_NULL_HANDLE;
4206 m_Suballocations.push_back(suballoc);
4207 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4209 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full invariant check: suballocations are contiguous, no two adjacent free
// ranges, free/used consistent with hAllocation, and the size-sorted free
// index matches. Early-return-false lines were lost to extraction.
4212 bool VmaBlockMetadata::Validate()
const 4214 if(m_Suballocations.empty())
4220 VkDeviceSize calculatedOffset = 0;
4222 uint32_t calculatedFreeCount = 0;
4224 VkDeviceSize calculatedSumFreeSize = 0;
4227 size_t freeSuballocationsToRegister = 0;
4229 bool prevFree =
false;
4231 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4232 suballocItem != m_Suballocations.cend();
4235 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
4238 if(subAlloc.offset != calculatedOffset)
4243 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
4245 if(prevFree && currFree)
4249 prevFree = currFree;
// Free <=> no allocation handle.
4251 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4258 calculatedSumFreeSize += subAlloc.size;
4259 ++calculatedFreeCount;
4260 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4262 ++freeSuballocationsToRegister;
4266 calculatedOffset += subAlloc.size;
4271 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4276 VkDeviceSize lastSize = 0;
4277 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4279 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4282 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
// Must be sorted ascending by size.
4287 if(suballocItem->size < lastSize)
4292 lastSize = suballocItem->size;
4297 ValidateFreeSuballocationList() &&
4298 (calculatedOffset == m_Size) &&
4299 (calculatedSumFreeSize == m_SumFreeSize) &&
4300 (calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the size-sorted index. The
// empty-index return (presumably 0) was lost to extraction.
4303 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4305 if(!m_FreeSuballocationsBySize.empty())
4307 return m_FreeSuballocationsBySize.back()->size;
4315 bool VmaBlockMetadata::IsEmpty()
const 4317 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Statistics gathering and detailed-map JSON dump. Accumulation lines inside
// the loops were lost to extraction.
4320 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4324 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4336 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4337 suballocItem != m_Suballocations.cend();
4340 const VmaSuballocation& suballoc = *suballocItem;
4341 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4354 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4356 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4358 inoutStats.
size += m_Size;
// JSON dump of the whole block: totals, then one object per suballocation.
4365 #if VMA_STATS_STRING_ENABLED 4367 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4371 json.WriteString(
"TotalBytes");
4372 json.WriteNumber(m_Size);
4374 json.WriteString(
"UnusedBytes");
4375 json.WriteNumber(m_SumFreeSize);
4377 json.WriteString(
"Allocations");
4378 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4380 json.WriteString(
"UnusedRanges");
4381 json.WriteNumber(m_FreeCount);
4383 json.WriteString(
"Suballocations");
4386 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4387 suballocItem != m_Suballocations.cend();
4388 ++suballocItem, ++i)
4390 json.BeginObject(
true);
4392 json.WriteString(
"Type");
4393 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4395 json.WriteString(
"Size");
4396 json.WriteNumber(suballocItem->size);
4398 json.WriteString(
"Offset");
4399 json.WriteNumber(suballocItem->offset);
4408 #endif // #if VMA_STATS_STRING_ENABLED 4420 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4422 VMA_ASSERT(IsEmpty());
4423 pAllocationRequest->offset = 0;
4424 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4425 pAllocationRequest->sumItemSize = 0;
4426 pAllocationRequest->item = m_Suballocations.begin();
4427 pAllocationRequest->itemsToMakeLostCount = 0;
// Search for a place to put an allocation. First pass: best-fit binary search
// over the size-sorted free list (VMA_BEST_FIT vs. worst-fit branch —
// controlling #if lines lost to extraction). Second pass (canMakeOtherLost):
// brute-force over all suballocations, planning to sacrifice lost-capable
// allocations, keeping the cheapest plan by CalcCost().
4430 bool VmaBlockMetadata::CreateAllocationRequest(
4431 uint32_t currentFrameIndex,
4432 uint32_t frameInUseCount,
4433 VkDeviceSize bufferImageGranularity,
4434 VkDeviceSize allocSize,
4435 VkDeviceSize allocAlignment,
4436 VmaSuballocationType allocType,
4437 bool canMakeOtherLost,
4438 VmaAllocationRequest* pAllocationRequest)
4440 VMA_ASSERT(allocSize > 0);
4441 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4442 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4443 VMA_HEAVY_ASSERT(Validate());
// Quick reject: without making others lost, total free space must suffice.
4446 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4452 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4453 if(freeSuballocCount > 0)
// Best fit: first free range whose size >= allocSize, then scan upward.
4458 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4459 m_FreeSuballocationsBySize.data(),
4460 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4462 VmaSuballocationItemSizeLess());
4463 size_t index = it - m_FreeSuballocationsBySize.data();
4464 for(; index < freeSuballocCount; ++index)
4469 bufferImageGranularity,
4473 m_FreeSuballocationsBySize[index],
4475 &pAllocationRequest->offset,
4476 &pAllocationRequest->itemsToMakeLostCount,
4477 &pAllocationRequest->sumFreeSize,
4478 &pAllocationRequest->sumItemSize))
4480 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst fit: scan from largest free range downward.
4488 for(
size_t index = freeSuballocCount; index--; )
4493 bufferImageGranularity,
4497 m_FreeSuballocationsBySize[index],
4499 &pAllocationRequest->offset,
4500 &pAllocationRequest->itemsToMakeLostCount,
4501 &pAllocationRequest->sumFreeSize,
4502 &pAllocationRequest->sumItemSize))
4504 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4511 if(canMakeOtherLost)
// Start with "infinite" cost; keep the cheapest feasible plan found.
4515 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4516 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4518 VmaAllocationRequest tmpAllocRequest = {};
4519 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4520 suballocIt != m_Suballocations.end();
4523 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4524 suballocIt->hAllocation->CanBecomeLost())
4529 bufferImageGranularity,
4535 &tmpAllocRequest.offset,
4536 &tmpAllocRequest.itemsToMakeLostCount,
4537 &tmpAllocRequest.sumFreeSize,
4538 &tmpAllocRequest.sumItemSize))
4540 tmpAllocRequest.item = suballocIt;
4542 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4544 *pAllocationRequest = tmpAllocRequest;
// Found at least one feasible plan iff cost fields were updated.
4550 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the make-lost plan from a request: walks forward from
// request->item freeing lost-capable allocations until the planned count is
// reached (failure-return lines lost to extraction). Then
// MakeAllocationsLost: frees every allocation that can currently be lost.
4559 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
4560 uint32_t currentFrameIndex,
4561 uint32_t frameInUseCount,
4562 VmaAllocationRequest* pAllocationRequest)
4564 while(pAllocationRequest->itemsToMakeLostCount > 0)
4566 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4568 ++pAllocationRequest->item;
4570 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4571 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4572 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4573 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Freeing may merge neighbors; the returned iterator stays valid.
4575 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4576 --pAllocationRequest->itemsToMakeLostCount;
4584 VMA_HEAVY_ASSERT(Validate());
4585 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4586 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
4591 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4593 uint32_t lostAllocationCount = 0;
4594 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4595 it != m_Suballocations.end();
4598 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4599 it->hAllocation->CanBecomeLost() &&
4600 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4602 it = FreeSuballocation(it);
4603 ++lostAllocationCount;
4606 return lostAllocationCount;
// Commits a request: converts the chosen free suballocation into a used one,
// inserting free "padding" suballocations before/after if the request does
// not consume the whole range, and fixes up counters.
4609 void VmaBlockMetadata::Alloc(
4610 const VmaAllocationRequest& request,
4611 VmaSuballocationType type,
4612 VkDeviceSize allocSize,
4613 VmaAllocation hAllocation)
4615 VMA_ASSERT(request.item != m_Suballocations.end());
4616 VmaSuballocation& suballoc = *request.item;
4618 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4620 VMA_ASSERT(request.offset >= suballoc.offset);
4621 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4622 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4623 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the free index before mutating size (index is size-sorted).
4627 UnregisterFreeSuballocation(request.item);
4629 suballoc.offset = request.offset;
4630 suballoc.size = allocSize;
4631 suballoc.type = type;
4632 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation after the item.
4637 VmaSuballocation paddingSuballoc = {};
4638 paddingSuballoc.offset = request.offset + allocSize;
4639 paddingSuballoc.size = paddingEnd;
4640 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4641 VmaSuballocationList::iterator next = request.item;
4643 const VmaSuballocationList::iterator paddingEndItem =
4644 m_Suballocations.insert(next, paddingSuballoc);
4645 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation before the item.
4651 VmaSuballocation paddingSuballoc = {};
4652 paddingSuballoc.offset = request.offset - paddingBegin;
4653 paddingSuballoc.size = paddingBegin;
4654 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4655 const VmaSuballocationList::iterator paddingBeginItem =
4656 m_Suballocations.insert(request.item, paddingSuballoc);
4657 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; paddings (if any) re-add to the free count
// (increment lines lost to extraction).
4661 m_FreeCount = m_FreeCount - 1;
4662 if(paddingBegin > 0)
4670 m_SumFreeSize -= allocSize;
// Frees by linear search for the suballocation holding this allocation;
// asserts if not found (the early return after freeing was lost to
// extraction).
4673 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
4675 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4676 suballocItem != m_Suballocations.end();
4679 VmaSuballocation& suballoc = *suballocItem;
4680 if(suballoc.hAllocation == allocation)
4682 FreeSuballocation(suballocItem);
4683 VMA_HEAVY_ASSERT(Validate());
4687 VMA_ASSERT(0 &&
"Not found!");
// Validates invariants of m_FreeSuballocationsBySize: every registered item
// must be FREE, at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes,
// and the vector must be sorted by ascending size.
4690 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 4692 VkDeviceSize lastSize = 0;
4693 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4695 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
// Registered items must actually be free.
4697 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Items below the registration threshold must not appear here.
4702 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Sizes must be non-decreasing (the vector is kept sorted).
4707 if(it->size < lastSize)
4713 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at `suballocItem`. On success writes the final aligned
// offset to *pOffset. When canMakeOtherLost is true, the candidate region
// may span non-free suballocations that can be made "lost"; in that case
// *itemsToMakeLostCount, *pSumFreeSize and *pSumItemSize report the cost of
// doing so. Respects VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT and the device's
// bufferImageGranularity (no buffer/image conflict on the same "page").
4718 bool VmaBlockMetadata::CheckAllocation(
4719 uint32_t currentFrameIndex,
4720 uint32_t frameInUseCount,
4721 VkDeviceSize bufferImageGranularity,
4722 VkDeviceSize allocSize,
4723 VkDeviceSize allocAlignment,
4724 VmaSuballocationType allocType,
4725 VmaSuballocationList::const_iterator suballocItem,
4726 bool canMakeOtherLost,
4727 VkDeviceSize* pOffset,
4728 size_t* itemsToMakeLostCount,
4729 VkDeviceSize* pSumFreeSize,
4730 VkDeviceSize* pSumItemSize)
const 4732 VMA_ASSERT(allocSize > 0);
4733 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4734 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4735 VMA_ASSERT(pOffset != VMA_NULL);
4737 *itemsToMakeLostCount = 0;
// --- Branch 1: the candidate start item may be occupied, but occupants can
// potentially be sacrificed ("made lost") to fit the new allocation. ---
4741 if(canMakeOtherLost)
4743 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4745 *pSumFreeSize = suballocItem->size;
// An occupied item is usable only if its allocation can become lost and it
// has not been used within the last frameInUseCount frames.
4749 if(suballocItem->hAllocation->CanBecomeLost() &&
4750 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4752 ++*itemsToMakeLostCount;
4753 *pSumItemSize = suballocItem->size;
// Quick reject: not enough space left in the block from this offset on.
4762 if(m_Size - suballocItem->offset < allocSize)
4768 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
4771 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4773 *pOffset += VMA_DEBUG_MARGIN;
// Honor both the caller's alignment and the global debug alignment.
4777 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4778 *pOffset = VmaAlignUp(*pOffset, alignment);
// Scan backwards: if a preceding suballocation of a conflicting type
// (buffer vs. image) shares the same granularity page, bump the offset up
// to the next bufferImageGranularity boundary.
4782 if(bufferImageGranularity > 1)
4784 bool bufferImageGranularityConflict =
false;
4785 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4786 while(prevSuballocItem != m_Suballocations.cbegin())
4789 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4790 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4792 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4794 bufferImageGranularityConflict =
true;
4802 if(bufferImageGranularityConflict)
4804 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// Alignment pushed the offset past the end of the start item entirely.
4810 if(*pOffset >= suballocItem->offset + suballocItem->size)
4816 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// A debug margin is also required after the allocation unless it reaches
// the very end of the block.
4819 VmaSuballocationList::const_iterator next = suballocItem;
4821 const VkDeviceSize requiredEndMargin =
4822 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4824 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4826 if(suballocItem->offset + totalSize > m_Size)
// The region may extend over several following suballocations; walk them,
// accumulating free space and the cost of losing occupied ones.
4833 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4834 if(totalSize > suballocItem->size)
4836 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4837 while(remainingSize > 0)
4840 if(lastSuballocItem == m_Suballocations.cend())
4844 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4846 *pSumFreeSize += lastSuballocItem->size;
4850 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4851 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4852 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4854 ++*itemsToMakeLostCount;
4855 *pSumItemSize += lastSuballocItem->size;
4862 remainingSize = (lastSuballocItem->size < remainingSize) ?
4863 remainingSize - lastSuballocItem->size : 0;
// Scan forwards: later suballocations on the same granularity page with a
// conflicting type must also be sacrificeable, else the placement fails.
4869 if(bufferImageGranularity > 1)
4871 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4873 while(nextSuballocItem != m_Suballocations.cend())
4875 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4876 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4878 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4880 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4881 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4882 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4884 ++*itemsToMakeLostCount;
// --- Branch 2: plain placement into a single FREE suballocation; nothing
// may be made lost, the region must fit entirely inside this item. ---
4903 const VmaSuballocation& suballoc = *suballocItem;
4904 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4906 *pSumFreeSize = suballoc.size;
// Early reject: the free item is smaller than the requested size.
4909 if(suballoc.size < allocSize)
4915 *pOffset = suballoc.offset;
4918 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4920 *pOffset += VMA_DEBUG_MARGIN;
4924 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4925 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict check as in branch 1.
4929 if(bufferImageGranularity > 1)
4931 bool bufferImageGranularityConflict =
false;
4932 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4933 while(prevSuballocItem != m_Suballocations.cbegin())
4936 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4937 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4939 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4941 bufferImageGranularityConflict =
true;
4949 if(bufferImageGranularityConflict)
4951 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4956 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4959 VmaSuballocationList::const_iterator next = suballocItem;
4961 const VkDeviceSize requiredEndMargin =
4962 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + payload + end margin exceed this free item.
4965 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity check: any conflicting neighbor on the same page
// makes this placement invalid (nothing can be made lost here).
4972 if(bufferImageGranularity > 1)
4974 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4976 while(nextSuballocItem != m_Suballocations.cend())
4978 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4979 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4981 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5000 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5002 VMA_ASSERT(item != m_Suballocations.end());
5003 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5005 VmaSuballocationList::iterator nextItem = item;
5007 VMA_ASSERT(nextItem != m_Suballocations.end());
5008 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5010 item->size += nextItem->size;
5012 m_Suballocations.erase(nextItem);
// Converts the given occupied suballocation back to FREE, merges it with
// adjacent free neighbors, and returns an iterator to the resulting merged
// free item (registered in the size-sorted free list).
5015 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
// Mark the item free and detach it from its owning allocation handle.
5018 VmaSuballocation& suballoc = *suballocItem;
5019 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5020 suballoc.hAllocation = VK_NULL_HANDLE;
5024 m_SumFreeSize += suballoc.size;
// Determine whether the immediate neighbors are also free.
5027 bool mergeWithNext =
false;
5028 bool mergeWithPrev =
false;
5030 VmaSuballocationList::iterator nextItem = suballocItem;
5032 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5034 mergeWithNext =
true;
5037 VmaSuballocationList::iterator prevItem = suballocItem;
5038 if(suballocItem != m_Suballocations.begin())
5041 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5043 mergeWithPrev =
true;
// Neighbors being merged must first leave the size-sorted registry, since
// their sizes change (or they cease to exist) during the merge.
5049 UnregisterFreeSuballocation(nextItem);
5050 MergeFreeWithNext(suballocItem);
5055 UnregisterFreeSuballocation(prevItem);
5056 MergeFreeWithNext(prevItem);
5057 RegisterFreeSuballocation(prevItem);
// No merge with previous: register the (possibly next-merged) item itself.
5062 RegisterFreeSuballocation(suballocItem);
5063 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Items below the registration threshold are
// tracked only in the main list, not in this acceleration structure.
5067 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5069 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5070 VMA_ASSERT(item->size > 0);
// NOTE: validation is done only under VMA_HEAVY_ASSERT because it is O(n).
5074 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5076 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5078 if(m_FreeSuballocationsBySize.empty())
5080 m_FreeSuballocationsBySize.push_back(item);
// Binary-search insertion keeps the by-size ordering invariant.
5084 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Uses binary
// search to find the first entry of equal size, then scans forward among
// equal-sized entries to find the exact iterator.
5092 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5094 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5095 VMA_ASSERT(item->size > 0);
5099 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the registration threshold were never added, so there is
// nothing to remove for them.
5101 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5103 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5104 m_FreeSuballocationsBySize.data(),
5105 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5107 VmaSuballocationItemSizeLess());
// Linear scan over the run of equal-sized entries.
5108 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5109 index < m_FreeSuballocationsBySize.size();
5112 if(m_FreeSuballocationsBySize[index] == item)
5114 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walking past the equal-size run without a match means the registry is
// inconsistent with the suballocation list.
5117 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5119 VMA_ASSERT(0 &&
"Not found.");
// Constructs an empty, uninitialized block. Real state is assigned later in
// Init(); until then the memory handle is null and the type index invalid.
5128 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5129 m_MemoryTypeIndex(UINT32_MAX),
5130 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
5131 m_hMemory(VK_NULL_HANDLE),
5132 m_PersistentMap(false),
5133 m_pMappedData(VMA_NULL),
5134 m_Metadata(hAllocator)
// Takes ownership of a freshly allocated VkDeviceMemory and initializes the
// block's metadata for it. May only be called once per block (asserted).
// pMappedData is the persistent mapping pointer, if the memory was mapped.
5138 void VmaDeviceMemoryBlock::Init(
5139 uint32_t newMemoryTypeIndex,
5140 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
5141 VkDeviceMemory newMemory,
5142 VkDeviceSize newSize,
5146 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5148 m_MemoryTypeIndex = newMemoryTypeIndex;
5149 m_BlockVectorType = newBlockVectorType;
5150 m_hMemory = newMemory;
5151 m_PersistentMap = persistentMap;
5152 m_pMappedData = pMappedData;
// The metadata starts with a single free suballocation spanning the block.
5154 m_Metadata.Init(newSize);
// Unmaps (if mapped) and releases the underlying VkDeviceMemory. The block
// must be empty: destroying it with live allocations is a usage error.
5157 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5161 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5163 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
// Persistent mapping must be undone before vkFreeMemory.
5164 if(m_pMappedData != VMA_NULL)
5166 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
5167 m_pMappedData = VMA_NULL;
// FreeVulkanMemory also updates the allocator's heap budget/callbacks.
5170 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5171 m_hMemory = VK_NULL_HANDLE;
5174 bool VmaDeviceMemoryBlock::Validate()
const 5176 if((m_hMemory == VK_NULL_HANDLE) ||
5177 (m_Metadata.GetSize() == 0))
5182 return m_Metadata.Validate();
5187 memset(&outInfo, 0,
sizeof(outInfo));
5206 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Constructs a custom pool: a single VmaBlockVector configured from the
// user-supplied VmaPoolCreateInfo (memory type, mapped/unmapped vector
// type, block size and count limits, frame-in-use count).
5214 VmaPool_T::VmaPool_T(
5215 VmaAllocator hAllocator,
5219 createInfo.memoryTypeIndex,
// Pool flag decides whether blocks are persistently mapped.
5221 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5222 createInfo.blockSize,
5223 createInfo.minBlockCount,
5224 createInfo.maxBlockCount,
5226 createInfo.frameInUseCount,
// Destructor: the owned block vector is released with the pool.
5231 VmaPool_T::~VmaPool_T()
// Constructs a vector of device memory blocks for one memory type and one
// block-vector type (mapped/unmapped). Used both for the allocator's
// default pools (isCustomPool == false) and for user-created pools.
5235 #if VMA_STATS_STRING_ENABLED 5237 #endif // #if VMA_STATS_STRING_ENABLED 5239 VmaBlockVector::VmaBlockVector(
5240 VmaAllocator hAllocator,
5241 uint32_t memoryTypeIndex,
5242 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5243 VkDeviceSize preferredBlockSize,
5244 size_t minBlockCount,
5245 size_t maxBlockCount,
5246 VkDeviceSize bufferImageGranularity,
5247 uint32_t frameInUseCount,
5248 bool isCustomPool) :
5249 m_hAllocator(hAllocator),
5250 m_MemoryTypeIndex(memoryTypeIndex),
5251 m_BlockVectorType(blockVectorType),
5252 m_PreferredBlockSize(preferredBlockSize),
5253 m_MinBlockCount(minBlockCount),
5254 m_MaxBlockCount(maxBlockCount),
5255 m_BufferImageGranularity(bufferImageGranularity),
5256 m_FrameInUseCount(frameInUseCount),
5257 m_IsCustomPool(isCustomPool),
// Block pointers are stored in a vector using the allocator's callbacks.
5258 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5259 m_HasEmptyBlock(false),
// Defragmentator is created lazily via EnsureDefragmentator().
5260 m_pDefragmentator(VMA_NULL)
// Destroys all owned blocks (freeing their device memory) in reverse
// order. A live defragmentator at this point is a usage error.
5264 VmaBlockVector::~VmaBlockVector()
5266 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5268 for(
size_t i = m_Blocks.size(); i--; )
5270 m_Blocks[i]->Destroy(m_hAllocator);
5271 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-allocates m_MinBlockCount blocks of the preferred size (used when a
// pool requests a guaranteed minimum). Stops at the first failure.
5275 VkResult VmaBlockVector::CreateMinBlocks()
5277 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5279 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5280 if(res != VK_SUCCESS)
// Accumulates statistics of all blocks in this vector into *pStats.
// Thread-safe: takes the vector's mutex (if mutexes are enabled).
5288 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5296 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5298 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5300 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5302 VMA_HEAVY_ASSERT(pBlock->Validate());
5303 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on make-lost retry rounds in Allocate() below; prevents
// livelock when concurrent allocations keep invalidating each other.
5307 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector using a three-stage strategy:
//   1. Try to place the allocation in an existing block (without making
//      any other allocation lost).
//   2. If allowed, create a new block (halving the size on failure for
//      default pools) and allocate from it.
//   3. If the caller permits, make old "lost-able" allocations lost to
//      free up space, retrying up to VMA_ALLOCATION_TRY_COUNT times.
5309 VkResult VmaBlockVector::Allocate(
5310 VmaPool hCurrentPool,
5311 uint32_t currentFrameIndex,
5312 const VkMemoryRequirements& vkMemReq,
5314 VmaSuballocationType suballocType,
5315 VmaAllocation* pAllocation)
// Persistent-map request must match the vector's mapped/unmapped type.
5319 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5321 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5322 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5325 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Stage 1: search existing blocks for a spot that requires no losses.
5329 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5331 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5332 VMA_ASSERT(pCurrBlock);
5333 VmaAllocationRequest currRequest = {};
5334 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5337 m_BufferImageGranularity,
// Stage 1 forbids sacrificing other allocations.
5345 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Allocating into an empty block means it is no longer empty.
5348 if(pCurrBlock->m_Metadata.IsEmpty())
5350 m_HasEmptyBlock =
false;
5353 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5354 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5355 (*pAllocation)->InitBlockAllocation(
5364 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5365 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Stage 2: create a brand-new block if the count limit permits.
5370 const bool canCreateNewBlock =
5372 (m_Blocks.size() < m_MaxBlockCount);
5375 if(canCreateNewBlock)
5378 VkDeviceSize blockSize = m_PreferredBlockSize;
5379 size_t newBlockIndex = 0;
5380 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// For default (non-custom) pools, retry with progressively smaller block
// sizes as long as they still fit the request.
5383 if(res < 0 && m_IsCustomPool ==
false)
5387 if(blockSize >= vkMemReq.size)
5389 res = CreateBlock(blockSize, &newBlockIndex);
5394 if(blockSize >= vkMemReq.size)
5396 res = CreateBlock(blockSize, &newBlockIndex);
5401 if(res == VK_SUCCESS)
5403 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5404 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
// A fresh block is empty, so the first-fit request cannot fail.
5407 VmaAllocationRequest allocRequest;
5408 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
5409 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5410 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5411 (*pAllocation)->InitBlockAllocation(
5414 allocRequest.offset,
5420 VMA_HEAVY_ASSERT(pBlock->Validate());
5421 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Stage 3: make other allocations lost, choosing the cheapest candidate.
5430 if(canMakeOtherLost)
5432 uint32_t tryIndex = 0;
5433 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5435 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5436 VmaAllocationRequest bestRequest = {};
5437 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Evaluate every block; keep the request with the lowest loss cost.
5441 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5443 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5444 VMA_ASSERT(pCurrBlock);
5445 VmaAllocationRequest currRequest = {};
5446 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5449 m_BufferImageGranularity,
5456 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5457 if(pBestRequestBlock == VMA_NULL ||
5458 currRequestCost < bestRequestCost)
5460 pBestRequestBlock = pCurrBlock;
5461 bestRequest = currRequest;
5462 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be lost - cannot do better.
5464 if(bestRequestCost == 0)
5472 if(pBestRequestBlock != VMA_NULL)
// MakeRequestedAllocationsLost can fail if another thread touched the
// victims meanwhile; then the whole round is retried.
5474 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
5480 if(pBestRequestBlock->m_Metadata.IsEmpty())
5482 m_HasEmptyBlock =
false;
5485 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5486 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5487 (*pAllocation)->InitBlockAllocation(
5496 VMA_HEAVY_ASSERT(pBlock->Validate());
5497 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// All retry rounds exhausted without a stable result.
5511 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5513 return VK_ERROR_TOO_MANY_OBJECTS;
5517 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns hAllocation's bytes to its block. If that leaves the block empty
// and another empty block already exists (and the minimum block count
// allows), the block is destroyed - at most one empty block is retained.
// The actual destruction happens outside the mutex to shorten the lock.
5520 void VmaBlockVector::Free(
5521 VmaAllocation hAllocation)
5523 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: metadata update and block bookkeeping only.
5527 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5529 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5531 pBlock->m_Metadata.Free(hAllocation);
5532 VMA_HEAVY_ASSERT(pBlock->Validate());
5534 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5537 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block "in reserve"? Then this one can go.
5540 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5542 pBlockToDelete = pBlock;
// Otherwise keep this block as the single retained empty block.
5548 m_HasEmptyBlock =
true;
5552 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately performed after releasing
// the mutex, because it can be a slow driver call.
5557 if(pBlockToDelete != VMA_NULL)
5559 VMA_DEBUG_LOG(
" Deleted empty allocation");
5560 pBlockToDelete->Destroy(m_hAllocator);
5561 vma_delete(m_hAllocator, pBlockToDelete);
// Removes pBlock's pointer from m_Blocks (does not destroy the block).
// Linear search; the block is expected to be present.
5565 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5567 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5569 if(m_Blocks[blockIndex] == pBlock)
5571 VmaVectorRemove(m_Blocks, blockIndex);
// Performs one incremental step of sorting m_Blocks by ascending free
// space: a single bubble-sort pass that swaps at most one adjacent pair.
// Called after each free so blocks with less free space are tried first,
// which amortizes the sorting cost across allocations.
5578 void VmaBlockVector::IncrementallySortBlocks()
5581 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5583 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
5585 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize, optionally maps it (for
// mapped vectors when persistent mapping is currently enabled), wraps it
// in a VmaDeviceMemoryBlock and appends it to m_Blocks. On success writes
// the new block's index to *pNewBlockIndex if requested.
5591 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5593 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5594 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5595 allocInfo.allocationSize = blockSize;
5596 VkDeviceMemory mem = VK_NULL_HANDLE;
5597 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Map immediately only while persistent mappings are globally active
// (counter == 0 means no vmaUnmapPersistentlyMappedMemory in effect).
5606 void* pMappedData = VMA_NULL;
5607 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5608 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5610 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5611 m_hAllocator->m_hDevice,
// Mapping failure releases the freshly allocated memory before returning.
5619 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5620 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5626 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5629 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5631 allocInfo.allocationSize,
5635 m_Blocks.push_back(pBlock);
5636 if(pNewBlockIndex != VMA_NULL)
5638 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON: configuration (memory type, block
// size/count limits, frame-in-use count) followed by the detailed map of
// every block. Custom pools and default pools emit slightly different keys.
#if VMA_STATS_STRING_ENABLED 5646 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5648 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: emit full pool configuration.
5654 json.WriteString(
"MemoryTypeIndex");
5655 json.WriteNumber(m_MemoryTypeIndex);
5657 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5659 json.WriteString(
"Mapped");
5660 json.WriteBool(
true);
5663 json.WriteString(
"BlockSize");
5664 json.WriteNumber(m_PreferredBlockSize);
5666 json.WriteString(
"BlockCount");
5667 json.BeginObject(
true);
5668 if(m_MinBlockCount > 0)
5670 json.WriteString(
"Min");
5671 json.WriteNumber(m_MinBlockCount);
5673 if(m_MaxBlockCount < SIZE_MAX)
5675 json.WriteString(
"Max");
5676 json.WriteNumber(m_MaxBlockCount);
5678 json.WriteString(
"Cur");
5679 json.WriteNumber(m_Blocks.size());
5682 if(m_FrameInUseCount > 0)
5684 json.WriteString(
"FrameInUseCount");
5685 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size is of interest.
5690 json.WriteString(
"PreferredBlockSize");
5691 json.WriteNumber(m_PreferredBlockSize);
// Common part: per-block detailed maps.
5694 json.WriteString(
"Blocks");
5696 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5698 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Temporarily unmaps all persistently mapped blocks in this vector
// (counterpart of MapPersistentlyMappedMemory; used around events like
// device loss / defragmentation on platforms that require it).
#endif // #if VMA_STATS_STRING_ENABLED 5707 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5709 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5711 for(
size_t i = m_Blocks.size(); i--; )
5713 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5714 if(pBlock->m_pMappedData != VMA_NULL)
// Only persistently mapped blocks should carry a mapping pointer here.
5716 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5717 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5718 pBlock->m_pMappedData = VMA_NULL;
// Re-maps all persistently mapped blocks after a previous
// UnmapPersistentlyMappedMemory(). Continues through failures and returns
// the last non-success result (VK_SUCCESS if everything mapped).
5723 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5725 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5727 VkResult finalResult = VK_SUCCESS;
5728 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5730 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5731 if(pBlock->m_PersistentMap)
// A persistent block must not already be mapped at this point.
5733 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5734 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5735 m_hAllocator->m_hDevice,
5740 &pBlock->m_pMappedData);
5741 if(localResult != VK_SUCCESS)
5743 finalResult = localResult;
// Lazily creates (on first call) and returns the defragmentator for this
// block vector. Destroyed later via DestroyDefragmentator().
5750 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5751 VmaAllocator hAllocator,
5752 uint32_t currentFrameIndex)
5754 if(m_pDefragmentator == VMA_NULL)
5756 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5762 return m_pDefragmentator;
// Runs the defragmentator within the given byte/allocation move budget,
// accumulates the results into *pDefragmentationStats, and then destroys
// any memory blocks that became completely empty (respecting the minimum
// block count and the keep-one-empty-block policy).
5765 VkResult VmaBlockVector::Defragment(
5767 VkDeviceSize& maxBytesToMove,
5768 uint32_t& maxAllocationsToMove)
// No-op if no defragmentator was ever requested for this vector.
5770 if(m_pDefragmentator == VMA_NULL)
5775 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5778 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5781 if(pDefragmentationStats != VMA_NULL)
5783 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5784 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must have respected the caller's budget.
5787 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5788 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Recompute m_HasEmptyBlock while destroying now-empty surplus blocks.
5794 m_HasEmptyBlock =
false;
5795 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5797 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5798 if(pBlock->m_Metadata.IsEmpty())
5800 if(m_Blocks.size() > m_MinBlockCount)
5802 if(pDefragmentationStats != VMA_NULL)
5805 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
5808 VmaVectorRemove(m_Blocks, blockIndex);
5809 pBlock->Destroy(m_hAllocator);
5810 vma_delete(m_hAllocator, pBlock);
// Minimum block count reached: keep the empty block and remember it.
5814 m_HasEmptyBlock =
true;
5822 void VmaBlockVector::DestroyDefragmentator()
5824 if(m_pDefragmentator != VMA_NULL)
5826 vma_delete(m_hAllocator, m_pDefragmentator);
5827 m_pDefragmentator = VMA_NULL;
// Marks as lost every lost-able allocation in every block of this vector
// that has not been used within m_FrameInUseCount frames; the total count
// is reported through pLostAllocationCount.
5831 void VmaBlockVector::MakePoolAllocationsLost(
5832 uint32_t currentFrameIndex,
5833 size_t* pLostAllocationCount)
5835 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5837 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5839 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5841 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Accumulates this vector's allocation statistics into pStats three times:
// into the global total, into the entry for this memory type, and into the
// entry for the memory heap that this type belongs to.
5845 void VmaBlockVector::AddStats(
VmaStats* pStats)
5847 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5848 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5850 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5852 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5854 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5856 VMA_HEAVY_ASSERT(pBlock->Validate());
5858 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
5859 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5860 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5861 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructs a defragmentator bound to one block vector. Allocation and
// block bookkeeping vectors use the allocator's allocation callbacks.
5868 VmaDefragmentator::VmaDefragmentator(
5869 VmaAllocator hAllocator,
5870 VmaBlockVector* pBlockVector,
5871 uint32_t currentFrameIndex) :
5872 m_hAllocator(hAllocator),
5873 m_pBlockVector(pBlockVector),
5874 m_CurrentFrameIndex(currentFrameIndex),
5876 m_AllocationsMoved(0),
5877 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5878 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destroys the per-block info objects owned by this defragmentator
// (the blocks themselves belong to the block vector, not to us).
5882 VmaDefragmentator::~VmaDefragmentator()
5884 for(
size_t i = m_Blocks.size(); i--; )
5886 vma_delete(m_hAllocator, m_Blocks[i]);
5890 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5892 AllocationInfo allocInfo;
5893 allocInfo.m_hAllocation = hAlloc;
5894 allocInfo.m_pChanged = pChanged;
5895 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to this block's memory for the defragmentation
// copy, mapping it on demand. Three sources, in order: a mapping made
// earlier by this defragmentator, the block's persistent mapping, or a
// fresh vkMapMemory (remembered so Unmap() can undo it later).
5898 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Reuse a mapping created by a previous EnsureMapping call.
5901 if(m_pMappedDataForDefragmentation)
5903 *ppMappedData = m_pMappedDataForDefragmentation;
// Persistently mapped block: borrow its existing mapping.
5908 if(m_pBlock->m_PersistentMap)
5910 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5911 *ppMappedData = m_pBlock->m_pMappedData;
// Otherwise map now; the pointer is cached for reuse and later unmapping.
5916 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5917 hAllocator->m_hDevice,
5918 m_pBlock->m_hMemory,
5922 &m_pMappedDataForDefragmentation);
5923 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes a mapping created by EnsureMapping(); mappings borrowed from a
// persistently mapped block are left untouched (that pointer is not ours).
5927 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5929 if(m_pMappedDataForDefragmentation != VMA_NULL)
5931 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One pass of the defragmentation algorithm: repeatedly takes the current
// source allocation (from the highest-index block, largest first) and
// tries to move it into a lower block index / lower offset, memcpy-ing the
// data between mapped pointers and updating both blocks' metadata.
// Returns VK_SUCCESS when done or VK_INCOMPLETE when a move budget
// (maxBytesToMove / maxAllocationsToMove) would be exceeded.
5935 VkResult VmaDefragmentator::DefragmentRound(
5936 VkDeviceSize maxBytesToMove,
5937 uint32_t maxAllocationsToMove)
5939 if(m_Blocks.empty())
// Cursor over (block, allocation) pairs, starting at the last block.
// srcAllocIndex == SIZE_MAX means "start at the last allocation of the
// current block".
5944 size_t srcBlockIndex = m_Blocks.size() - 1;
5945 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the next candidate source allocation, walking
// backwards through blocks as each one is exhausted.
5951 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5953 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached block 0 with nothing left to move: the round is complete.
5956 if(srcBlockIndex == 0)
5963 srcAllocIndex = SIZE_MAX;
5968 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5972 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5973 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5975 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5976 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5977 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5978 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from index 0 up to (and including) the source.
5981 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5983 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5984 VmaAllocationRequest dstAllocRequest;
5985 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
5986 m_CurrentFrameIndex,
5987 m_pBlockVector->GetFrameInUseCount(),
5988 m_pBlockVector->GetBufferImageGranularity(),
// MoveMakesSense filters out moves to an equal-or-worse position.
5993 &dstAllocRequest) &&
5995 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5997 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop before exceeding the caller's move budget.
6000 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6001 (m_BytesMoved + size > maxBytesToMove))
6003 return VK_INCOMPLETE;
// Both blocks must be CPU-visible for the memcpy.
6006 void* pDstMappedData = VMA_NULL;
6007 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6008 if(res != VK_SUCCESS)
6013 void* pSrcMappedData = VMA_NULL;
6014 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6015 if(res != VK_SUCCESS)
// Copy payload bytes, then transfer metadata ownership to the new spot.
6022 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6023 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6024 static_cast<size_t>(size));
6026 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6027 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6029 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report the move to the caller through the per-allocation flag.
6031 if(allocInfo.m_pChanged != VMA_NULL)
6033 *allocInfo.m_pChanged = VK_TRUE;
6036 ++m_AllocationsMoved;
6037 m_BytesMoved += size;
6039 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found for this allocation: step the cursor backwards.
6047 if(srcAllocIndex > 0)
6053 if(srcBlockIndex > 0)
6056 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info, distributes the
// registered allocations into their owning blocks, sorts blocks by
// suitability as move destinations, then runs up to two DefragmentRound
// passes within the given budget. Unmaps any defrag-created mappings at
// the end.
6066 VkResult VmaDefragmentator::Defragment(
6067 VkDeviceSize maxBytesToMove,
6068 uint32_t maxAllocationsToMove)
6070 if(m_Allocations.empty())
// Create a BlockInfo for every block of the vector.
6076 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6077 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6079 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6080 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6081 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
6085 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6088 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6090 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped - they no longer occupy memory.
6092 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6094 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6095 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6096 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6098 (*it)->m_Allocations.push_back(allocInfo);
// Ownership of the entries has moved into the per-block lists.
6106 m_Allocations.clear();
6108 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6110 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6111 pBlockInfo->CalcHasNonMovableAllocations();
6112 pBlockInfo->SortAllocationsBySizeDescecnding();
// Prefer destination blocks that already contain immovable allocations.
6116 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stop early on VK_INCOMPLETE or error.
6119 VkResult result = VK_SUCCESS;
6120 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6122 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release temporary mappings created during the rounds.
6126 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6128 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6134 bool VmaDefragmentator::MoveMakesSense(
6135 size_t dstBlockIndex, VkDeviceSize dstOffset,
6136 size_t srcBlockIndex, VkDeviceSize srcOffset)
6138 if(dstBlockIndex < srcBlockIndex)
6142 if(dstBlockIndex > srcBlockIndex)
6146 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (header line precedes this excerpt): captures
// the create-info, imports Vulkan function pointers, queries device
// properties, applies per-heap size limits, and creates one block vector
// and one own-allocations vector per (memory type, block vector type).
6158 m_PhysicalDevice(pCreateInfo->physicalDevice),
6159 m_hDevice(pCreateInfo->device),
6160 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6161 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6162 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6163 m_UnmapPersistentlyMappedMemoryCounter(0),
6164 m_PreferredLargeHeapBlockSize(0),
6165 m_PreferredSmallHeapBlockSize(0),
6166 m_CurrentFrameIndex(0),
6167 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero-initialize aggregate members before filling them in.
6171 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6172 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6173 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6175 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6176 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
// Default: no per-heap limit.
6178 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6180 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device/memory properties through the imported function pointers.
6191 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6192 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided heap size limits, also clamping the reported heap
// sizes so block-size heuristics respect the limit.
6201 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6203 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6204 if(limit != VK_WHOLE_SIZE)
6206 m_HeapSizeLimit[heapIndex] = limit;
6207 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6209 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create default block vectors and own-allocation lists for every
// (memory type, mapped/unmapped) combination.
6215 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6217 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6219 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6221 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6224 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6228 GetBufferImageGranularity(),
6233 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destroys all default block vectors and own-allocation lists. All custom
// pools must have been destroyed by the user beforehand (asserted).
6238 VmaAllocator_T::~VmaAllocator_T()
6240 VMA_ASSERT(m_Pools.empty());
6242 for(
size_t i = GetMemoryTypeCount(); i--; )
6244 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6246 vma_delete(
this, m_pOwnAllocations[i][j]);
6247 vma_delete(
this, m_pBlockVectors[i][j]);
6252 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6254 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6255 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6256 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6257 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6258 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6259 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6260 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6261 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6262 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6263 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6264 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6265 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6266 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6267 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6268 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6269 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6271 if(pVulkanFunctions != VMA_NULL)
6273 m_VulkanFunctions = *pVulkanFunctions;
6278 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6279 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6280 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6281 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6282 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6283 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6284 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6285 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6286 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6287 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6288 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6289 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6290 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6291 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6294 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6296 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6297 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6298 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6299 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6302 VkResult VmaAllocator_T::AllocateMemoryOfType(
6303 const VkMemoryRequirements& vkMemReq,
6305 uint32_t memTypeIndex,
6306 VmaSuballocationType suballocType,
6307 VmaAllocation* pAllocation)
6309 VMA_ASSERT(pAllocation != VMA_NULL);
6310 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6312 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6313 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6314 VMA_ASSERT(blockVector);
6316 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6318 const bool ownMemory =
6320 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6322 vkMemReq.size > preferredBlockSize / 2);
6328 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6332 return AllocateOwnMemory(
6343 VkResult res = blockVector->Allocate(
6345 m_CurrentFrameIndex.load(),
6350 if(res == VK_SUCCESS)
6356 res = AllocateOwnMemory(
6361 createInfo.pUserData,
6363 if(res == VK_SUCCESS)
6366 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6372 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6378 VkResult VmaAllocator_T::AllocateOwnMemory(
6380 VmaSuballocationType suballocType,
6381 uint32_t memTypeIndex,
6384 VmaAllocation* pAllocation)
6386 VMA_ASSERT(pAllocation);
6388 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6389 allocInfo.memoryTypeIndex = memTypeIndex;
6390 allocInfo.allocationSize = size;
6393 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6394 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6397 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6401 void* pMappedData =
nullptr;
6404 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6406 res = (*m_VulkanFunctions.vkMapMemory)(
6415 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6416 FreeVulkanMemory(memTypeIndex, size, hMemory);
6422 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6423 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6427 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6428 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6429 VMA_ASSERT(pOwnAllocations);
6430 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6433 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6438 VkResult VmaAllocator_T::AllocateMemory(
6439 const VkMemoryRequirements& vkMemReq,
6441 VmaSuballocationType suballocType,
6442 VmaAllocation* pAllocation)
6447 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6448 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6450 if((createInfo.
pool != VK_NULL_HANDLE) &&
6453 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6454 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6457 if(createInfo.
pool != VK_NULL_HANDLE)
6459 return createInfo.
pool->m_BlockVector.Allocate(
6461 m_CurrentFrameIndex.load(),
6470 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6471 uint32_t memTypeIndex = UINT32_MAX;
6473 if(res == VK_SUCCESS)
6475 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6477 if(res == VK_SUCCESS)
6487 memoryTypeBits &= ~(1u << memTypeIndex);
6490 if(res == VK_SUCCESS)
6492 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6494 if(res == VK_SUCCESS)
6504 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6515 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6517 VMA_ASSERT(allocation);
6519 if(allocation->CanBecomeLost() ==
false ||
6520 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6522 switch(allocation->GetType())
6524 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6526 VmaBlockVector* pBlockVector = VMA_NULL;
6527 VmaPool hPool = allocation->GetPool();
6528 if(hPool != VK_NULL_HANDLE)
6530 pBlockVector = &hPool->m_BlockVector;
6534 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6535 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6536 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6538 pBlockVector->Free(allocation);
6541 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6542 FreeOwnMemory(allocation);
6549 vma_delete(
this, allocation);
6552 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6555 InitStatInfo(pStats->
total);
6556 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6558 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6562 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6564 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6565 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6567 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6568 VMA_ASSERT(pBlockVector);
6569 pBlockVector->AddStats(pStats);
6575 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6576 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6578 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6583 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6585 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6586 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6587 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6589 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6590 VMA_ASSERT(pOwnAllocVector);
6591 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6594 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6595 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6596 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6597 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6603 VmaPostprocessCalcStatInfo(pStats->
total);
6604 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6605 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6606 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6607 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6610 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6612 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6614 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6616 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6618 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6620 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6621 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6622 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6626 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6627 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6628 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6630 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6631 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
6637 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6638 pBlockVector->UnmapPersistentlyMappedMemory();
6645 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6646 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6648 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6655 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6657 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6658 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6660 VkResult finalResult = VK_SUCCESS;
6661 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6665 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6666 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6668 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6672 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6674 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6675 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6676 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6680 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6681 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6682 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6684 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6685 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
6691 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6692 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6693 if(localResult != VK_SUCCESS)
6695 finalResult = localResult;
6707 VkResult VmaAllocator_T::Defragment(
6708 VmaAllocation* pAllocations,
6709 size_t allocationCount,
6710 VkBool32* pAllocationsChanged,
6714 if(pAllocationsChanged != VMA_NULL)
6716 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6718 if(pDefragmentationStats != VMA_NULL)
6720 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6723 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6725 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6726 return VK_ERROR_MEMORY_MAP_FAILED;
6729 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6731 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6733 const size_t poolCount = m_Pools.size();
6736 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6738 VmaAllocation hAlloc = pAllocations[allocIndex];
6740 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6742 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6744 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6746 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6748 VmaBlockVector* pAllocBlockVector =
nullptr;
6750 const VmaPool hAllocPool = hAlloc->GetPool();
6752 if(hAllocPool != VK_NULL_HANDLE)
6754 pAllocBlockVector = &hAllocPool->GetBlockVector();
6759 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6762 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6764 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6765 &pAllocationsChanged[allocIndex] : VMA_NULL;
6766 pDefragmentator->AddAllocation(hAlloc, pChanged);
6770 VkResult result = VK_SUCCESS;
6774 VkDeviceSize maxBytesToMove = SIZE_MAX;
6775 uint32_t maxAllocationsToMove = UINT32_MAX;
6776 if(pDefragmentationInfo != VMA_NULL)
6783 for(uint32_t memTypeIndex = 0;
6784 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6788 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6790 for(uint32_t blockVectorType = 0;
6791 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6794 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6795 pDefragmentationStats,
6797 maxAllocationsToMove);
6803 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6805 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6806 pDefragmentationStats,
6808 maxAllocationsToMove);
6814 for(
size_t poolIndex = poolCount; poolIndex--; )
6816 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6820 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6822 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6824 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6826 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6834 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6836 if(hAllocation->CanBecomeLost())
6842 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6843 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6846 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6850 pAllocationInfo->
offset = 0;
6851 pAllocationInfo->
size = hAllocation->GetSize();
6853 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6856 else if(localLastUseFrameIndex == localCurrFrameIndex)
6858 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6859 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6860 pAllocationInfo->
offset = hAllocation->GetOffset();
6861 pAllocationInfo->
size = hAllocation->GetSize();
6862 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6863 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6868 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6870 localLastUseFrameIndex = localCurrFrameIndex;
6878 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6879 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6880 pAllocationInfo->
offset = hAllocation->GetOffset();
6881 pAllocationInfo->
size = hAllocation->GetSize();
6882 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6883 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6887 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6889 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6902 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6904 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6905 if(res != VK_SUCCESS)
6907 vma_delete(
this, *pPool);
6914 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6915 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6921 void VmaAllocator_T::DestroyPool(VmaPool pool)
6925 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6926 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6927 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6930 vma_delete(
this, pool);
6933 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6935 pool->m_BlockVector.GetPoolStats(pPoolStats);
6938 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6940 m_CurrentFrameIndex.store(frameIndex);
6943 void VmaAllocator_T::MakePoolAllocationsLost(
6945 size_t* pLostAllocationCount)
6947 hPool->m_BlockVector.MakePoolAllocationsLost(
6948 m_CurrentFrameIndex.load(),
6949 pLostAllocationCount);
6952 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6954 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6955 (*pAllocation)->InitLost();
6958 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6960 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6963 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6965 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6966 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6968 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6969 if(res == VK_SUCCESS)
6971 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
6976 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6981 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6984 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6986 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
6992 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6994 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6996 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6999 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7001 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7002 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7004 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7005 m_HeapSizeLimit[heapIndex] += size;
7009 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
7011 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
7013 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7015 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
7016 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
7017 VMA_ASSERT(pOwnAllocations);
7018 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
7019 VMA_ASSERT(success);
7022 VkDeviceMemory hMemory = allocation->GetMemory();
7024 if(allocation->GetMappedData() != VMA_NULL)
7026 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7029 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7031 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
7034 #if VMA_STATS_STRING_ENABLED 7036 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7038 bool ownAllocationsStarted =
false;
7039 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7041 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
7042 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7044 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
7045 VMA_ASSERT(pOwnAllocVector);
7046 if(pOwnAllocVector->empty() ==
false)
7048 if(ownAllocationsStarted ==
false)
7050 ownAllocationsStarted =
true;
7051 json.WriteString(
"OwnAllocations");
7055 json.BeginString(
"Type ");
7056 json.ContinueString(memTypeIndex);
7057 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7059 json.ContinueString(
" Mapped");
7065 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
7067 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
7068 json.BeginObject(
true);
7070 json.WriteString(
"Size");
7071 json.WriteNumber(hAlloc->GetSize());
7073 json.WriteString(
"Type");
7074 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7083 if(ownAllocationsStarted)
7089 bool allocationsStarted =
false;
7090 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7092 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7094 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7096 if(allocationsStarted ==
false)
7098 allocationsStarted =
true;
7099 json.WriteString(
"DefaultPools");
7103 json.BeginString(
"Type ");
7104 json.ContinueString(memTypeIndex);
7105 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7107 json.ContinueString(
" Mapped");
7111 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7115 if(allocationsStarted)
7122 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7123 const size_t poolCount = m_Pools.size();
7126 json.WriteString(
"Pools");
7128 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7130 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7137 #endif // #if VMA_STATS_STRING_ENABLED 7139 static VkResult AllocateMemoryForImage(
7140 VmaAllocator allocator,
7143 VmaSuballocationType suballocType,
7144 VmaAllocation* pAllocation)
7146 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7148 VkMemoryRequirements vkMemReq = {};
7149 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7151 return allocator->AllocateMemory(
7153 *pAllocationCreateInfo,
7163 VmaAllocator* pAllocator)
7165 VMA_ASSERT(pCreateInfo && pAllocator);
7166 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7172 VmaAllocator allocator)
7174 if(allocator != VK_NULL_HANDLE)
7176 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7177 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7178 vma_delete(&allocationCallbacks, allocator);
7183 VmaAllocator allocator,
7184 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7186 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7187 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7191 VmaAllocator allocator,
7192 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7194 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7195 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7199 VmaAllocator allocator,
7200 uint32_t memoryTypeIndex,
7201 VkMemoryPropertyFlags* pFlags)
7203 VMA_ASSERT(allocator && pFlags);
7204 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7205 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7209 VmaAllocator allocator,
7210 uint32_t frameIndex)
7212 VMA_ASSERT(allocator);
7213 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7215 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7217 allocator->SetCurrentFrameIndex(frameIndex);
7221 VmaAllocator allocator,
7224 VMA_ASSERT(allocator && pStats);
7225 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7226 allocator->CalculateStats(pStats);
7229 #if VMA_STATS_STRING_ENABLED 7232 VmaAllocator allocator,
7233 char** ppStatsString,
7234 VkBool32 detailedMap)
7236 VMA_ASSERT(allocator && ppStatsString);
7237 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7239 VmaStringBuilder sb(allocator);
7241 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7245 allocator->CalculateStats(&stats);
7247 json.WriteString(
"Total");
7248 VmaPrintStatInfo(json, stats.
total);
7250 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7252 json.BeginString(
"Heap ");
7253 json.ContinueString(heapIndex);
7257 json.WriteString(
"Size");
7258 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7260 json.WriteString(
"Flags");
7261 json.BeginArray(
true);
7262 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7264 json.WriteString(
"DEVICE_LOCAL");
7270 json.WriteString(
"Stats");
7271 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7274 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7276 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7278 json.BeginString(
"Type ");
7279 json.ContinueString(typeIndex);
7284 json.WriteString(
"Flags");
7285 json.BeginArray(
true);
7286 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7287 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7289 json.WriteString(
"DEVICE_LOCAL");
7291 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7293 json.WriteString(
"HOST_VISIBLE");
7295 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7297 json.WriteString(
"HOST_COHERENT");
7299 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7301 json.WriteString(
"HOST_CACHED");
7303 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7305 json.WriteString(
"LAZILY_ALLOCATED");
7311 json.WriteString(
"Stats");
7312 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7321 if(detailedMap == VK_TRUE)
7323 allocator->PrintDetailedMap(json);
7329 const size_t len = sb.GetLength();
7330 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7333 memcpy(pChars, sb.GetData(), len);
7336 *ppStatsString = pChars;
7340 VmaAllocator allocator,
7343 if(pStatsString != VMA_NULL)
7345 VMA_ASSERT(allocator);
7346 size_t len = strlen(pStatsString);
7347 vma_delete_array(allocator, pStatsString, len + 1);
7351 #endif // #if VMA_STATS_STRING_ENABLED 7356 VmaAllocator allocator,
7357 uint32_t memoryTypeBits,
7359 uint32_t* pMemoryTypeIndex)
7361 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7362 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7363 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7365 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7367 if(preferredFlags == 0)
7369 preferredFlags = requiredFlags;
7372 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7375 switch(pAllocationCreateInfo->
usage)
7380 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7383 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7386 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7387 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7390 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7391 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7399 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7402 *pMemoryTypeIndex = UINT32_MAX;
7403 uint32_t minCost = UINT32_MAX;
7404 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7405 memTypeIndex < allocator->GetMemoryTypeCount();
7406 ++memTypeIndex, memTypeBit <<= 1)
7409 if((memTypeBit & memoryTypeBits) != 0)
7411 const VkMemoryPropertyFlags currFlags =
7412 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7414 if((requiredFlags & ~currFlags) == 0)
7417 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7419 if(currCost < minCost)
7421 *pMemoryTypeIndex = memTypeIndex;
7431 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7435 VmaAllocator allocator,
7439 VMA_ASSERT(allocator && pCreateInfo && pPool);
7441 VMA_DEBUG_LOG(
"vmaCreatePool");
7443 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7445 return allocator->CreatePool(pCreateInfo, pPool);
7449 VmaAllocator allocator,
7452 VMA_ASSERT(allocator && pool);
7454 VMA_DEBUG_LOG(
"vmaDestroyPool");
7456 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7458 allocator->DestroyPool(pool);
7462 VmaAllocator allocator,
7466 VMA_ASSERT(allocator && pool && pPoolStats);
7468 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7470 allocator->GetPoolStats(pool, pPoolStats);
7474 VmaAllocator allocator,
7476 size_t* pLostAllocationCount)
7478 VMA_ASSERT(allocator && pool);
7480 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7482 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7486 VmaAllocator allocator,
7487 const VkMemoryRequirements* pVkMemoryRequirements,
7489 VmaAllocation* pAllocation,
7492 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7494 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7496 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7498 VkResult result = allocator->AllocateMemory(
7499 *pVkMemoryRequirements,
7501 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7504 if(pAllocationInfo && result == VK_SUCCESS)
7506 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7513 VmaAllocator allocator,
7516 VmaAllocation* pAllocation,
7519 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7521 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7523 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7525 VkMemoryRequirements vkMemReq = {};
7526 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7528 VkResult result = allocator->AllocateMemory(
7531 VMA_SUBALLOCATION_TYPE_BUFFER,
7534 if(pAllocationInfo && result == VK_SUCCESS)
7536 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7543 VmaAllocator allocator,
7546 VmaAllocation* pAllocation,
7549 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7551 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7553 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7555 VkResult result = AllocateMemoryForImage(
7559 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7562 if(pAllocationInfo && result == VK_SUCCESS)
7564 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7571 VmaAllocator allocator,
7572 VmaAllocation allocation)
7574 VMA_ASSERT(allocator && allocation);
7576 VMA_DEBUG_LOG(
"vmaFreeMemory");
7578 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7580 allocator->FreeMemory(allocation);
7584 VmaAllocator allocator,
7585 VmaAllocation allocation,
7588 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7590 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7592 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7596 VmaAllocator allocator,
7597 VmaAllocation allocation,
7600 VMA_ASSERT(allocator && allocation);
7602 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7604 allocation->SetUserData(pUserData);
7608 VmaAllocator allocator,
7609 VmaAllocation* pAllocation)
7611 VMA_ASSERT(allocator && pAllocation);
7613 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7615 allocator->CreateLostAllocation(pAllocation);
7619 VmaAllocator allocator,
7620 VmaAllocation allocation,
7623 VMA_ASSERT(allocator && allocation && ppData);
7625 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7627 return (*allocator->GetVulkanFunctions().vkMapMemory)(
7628 allocator->m_hDevice,
7629 allocation->GetMemory(),
7630 allocation->GetOffset(),
7631 allocation->GetSize(),
7637 VmaAllocator allocator,
7638 VmaAllocation allocation)
7640 VMA_ASSERT(allocator && allocation);
7642 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7644 (*allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, allocation->GetMemory());
7649 VMA_ASSERT(allocator);
7651 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7653 allocator->UnmapPersistentlyMappedMemory();
7658 VMA_ASSERT(allocator);
7660 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7662 return allocator->MapPersistentlyMappedMemory();
7666 VmaAllocator allocator,
7667 VmaAllocation* pAllocations,
7668 size_t allocationCount,
7669 VkBool32* pAllocationsChanged,
7673 VMA_ASSERT(allocator && pAllocations);
7675 VMA_DEBUG_LOG(
"vmaDefragment");
7677 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7679 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7683 VmaAllocator allocator,
7684 const VkBufferCreateInfo* pBufferCreateInfo,
7687 VmaAllocation* pAllocation,
7690 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7692 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7694 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7696 *pBuffer = VK_NULL_HANDLE;
7697 *pAllocation = VK_NULL_HANDLE;
7700 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7701 allocator->m_hDevice,
7703 allocator->GetAllocationCallbacks(),
7708 VkMemoryRequirements vkMemReq = {};
7709 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7712 res = allocator->AllocateMemory(
7714 *pAllocationCreateInfo,
7715 VMA_SUBALLOCATION_TYPE_BUFFER,
7720 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7721 allocator->m_hDevice,
7723 (*pAllocation)->GetMemory(),
7724 (*pAllocation)->GetOffset());
7728 if(pAllocationInfo != VMA_NULL)
7730 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7734 allocator->FreeMemory(*pAllocation);
7735 *pAllocation = VK_NULL_HANDLE;
7738 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7739 *pBuffer = VK_NULL_HANDLE;
7746 VmaAllocator allocator,
7748 VmaAllocation allocation)
7750 if(buffer != VK_NULL_HANDLE)
7752 VMA_ASSERT(allocator);
7754 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7756 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7758 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7760 allocator->FreeMemory(allocation);
7765 VmaAllocator allocator,
7766 const VkImageCreateInfo* pImageCreateInfo,
7769 VmaAllocation* pAllocation,
7772 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7774 VMA_DEBUG_LOG(
"vmaCreateImage");
7776 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7778 *pImage = VK_NULL_HANDLE;
7779 *pAllocation = VK_NULL_HANDLE;
7782 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7783 allocator->m_hDevice,
7785 allocator->GetAllocationCallbacks(),
7789 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7790 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7791 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7794 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7798 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7799 allocator->m_hDevice,
7801 (*pAllocation)->GetMemory(),
7802 (*pAllocation)->GetOffset());
7806 if(pAllocationInfo != VMA_NULL)
7808 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7812 allocator->FreeMemory(*pAllocation);
7813 *pAllocation = VK_NULL_HANDLE;
7816 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7817 *pImage = VK_NULL_HANDLE;
7824 VmaAllocator allocator,
7826 VmaAllocation allocation)
7828 if(image != VK_NULL_HANDLE)
7830 VMA_ASSERT(allocator);
7832 VMA_DEBUG_LOG(
"vmaDestroyImage");
7834 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7836 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7838 allocator->FreeMemory(allocation);
7842 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:446
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:469
Definition: vk_mem_alloc.h:800
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:456
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:651
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:450
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:928
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1081
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:852
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:700
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:733
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:409
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInfo::frameInUseCount frames back from now.
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:481
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:802
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:528
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:463
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:478
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:593
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:453
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:439
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:592
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1085
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:498
VmaStatInfo total
Definition: vk_mem_alloc.h:602
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1093
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:716
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1076
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:454
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:472
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:806
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:938
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:451
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:735
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:822
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:858
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:809
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:709
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1071
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
Definition: vk_mem_alloc.h:780
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1089
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:452
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:598
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:689
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1091
VmaMemoryUsage
Definition: vk_mem_alloc.h:637
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:727
Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you.
Definition: vk_mem_alloc.h:435
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:430
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:868
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:447
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:581
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:817
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:422
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:594
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:426
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:812
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:403
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:722
Definition: vk_mem_alloc.h:713
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:584
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:449
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:830
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:484
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:861
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:740
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
Definition: vk_mem_alloc.h:516
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:600
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:593
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:458
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:424
Definition: vk_mem_alloc.h:707
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:457
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:844
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:466
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
Definition: vk_mem_alloc.h:949
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:668
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:475
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:593
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:590
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:849
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:645
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:933
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1087
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:445
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:588
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:711
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:586
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:455
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:459
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:771
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistently mapped.
Definition: vk_mem_alloc.h:944
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
No intended memory usage specified.
Definition: vk_mem_alloc.h:640
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:448
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
Definition: vk_mem_alloc.h:652
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:914
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:648
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:656
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:594
Definition: vk_mem_alloc.h:437
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such blocks.
Definition: vk_mem_alloc.h:679
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:642
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:601
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:855
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:594
Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool.
Definition: vk_mem_alloc.h:798
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:919
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.