23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 429 #include <vulkan/vulkan.h> 436 VK_DEFINE_HANDLE(VmaAllocator)
440 VmaAllocator allocator,
442 VkDeviceMemory memory,
446 VmaAllocator allocator,
448 VkDeviceMemory memory,
564 VmaAllocator* pAllocator);
568 VmaAllocator allocator);
575 VmaAllocator allocator,
576 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
583 VmaAllocator allocator,
584 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
593 VmaAllocator allocator,
594 uint32_t memoryTypeIndex,
595 VkMemoryPropertyFlags* pFlags);
606 VmaAllocator allocator,
607 uint32_t frameIndex);
635 VmaAllocator allocator,
638 #define VMA_STATS_STRING_ENABLED 1 640 #if VMA_STATS_STRING_ENABLED 646 VmaAllocator allocator,
647 char** ppStatsString,
648 VkBool32 detailedMap);
651 VmaAllocator allocator,
654 #endif // #if VMA_STATS_STRING_ENABLED 663 VK_DEFINE_HANDLE(VmaPool)
786 VmaAllocator allocator,
787 uint32_t memoryTypeBits,
789 uint32_t* pMemoryTypeIndex);
899 VmaAllocator allocator,
906 VmaAllocator allocator,
916 VmaAllocator allocator,
927 VmaAllocator allocator,
929 size_t* pLostAllocationCount);
931 VK_DEFINE_HANDLE(VmaAllocation)
984 VmaAllocator allocator,
985 const VkMemoryRequirements* pVkMemoryRequirements,
987 VmaAllocation* pAllocation,
997 VmaAllocator allocator,
1000 VmaAllocation* pAllocation,
1005 VmaAllocator allocator,
1008 VmaAllocation* pAllocation,
1013 VmaAllocator allocator,
1014 VmaAllocation allocation);
1018 VmaAllocator allocator,
1019 VmaAllocation allocation,
1024 VmaAllocator allocator,
1025 VmaAllocation allocation,
1039 VmaAllocator allocator,
1040 VmaAllocation* pAllocation);
1051 VmaAllocator allocator,
1052 VmaAllocation allocation,
1056 VmaAllocator allocator,
1057 VmaAllocation allocation);
1185 VmaAllocator allocator,
1186 VmaAllocation* pAllocations,
1187 size_t allocationCount,
1188 VkBool32* pAllocationsChanged,
1218 VmaAllocator allocator,
1219 const VkBufferCreateInfo* pBufferCreateInfo,
1222 VmaAllocation* pAllocation,
1226 VmaAllocator allocator,
1228 VmaAllocation allocation);
1232 VmaAllocator allocator,
1233 const VkImageCreateInfo* pImageCreateInfo,
1236 VmaAllocation* pAllocation,
1240 VmaAllocator allocator,
1242 VmaAllocation allocation);
1246 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1249 #ifdef __INTELLISENSE__ 1250 #define VMA_IMPLEMENTATION 1253 #ifdef VMA_IMPLEMENTATION 1254 #undef VMA_IMPLEMENTATION 1276 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1277 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1289 #if VMA_USE_STL_CONTAINERS 1290 #define VMA_USE_STL_VECTOR 1 1291 #define VMA_USE_STL_UNORDERED_MAP 1 1292 #define VMA_USE_STL_LIST 1 1295 #if VMA_USE_STL_VECTOR 1299 #if VMA_USE_STL_UNORDERED_MAP 1300 #include <unordered_map> 1303 #if VMA_USE_STL_LIST 1312 #include <algorithm> 1316 #if !defined(_WIN32) 1323 #define VMA_ASSERT(expr) assert(expr) 1325 #define VMA_ASSERT(expr) 1331 #ifndef VMA_HEAVY_ASSERT 1333 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1335 #define VMA_HEAVY_ASSERT(expr) 1341 #define VMA_NULL nullptr 1344 #ifndef VMA_ALIGN_OF 1345 #define VMA_ALIGN_OF(type) (__alignof(type)) 1348 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1350 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1352 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1356 #ifndef VMA_SYSTEM_FREE 1358 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1360 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1365 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1369 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1373 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1377 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1380 #ifndef VMA_DEBUG_LOG 1381 #define VMA_DEBUG_LOG(format, ...) 1391 #if VMA_STATS_STRING_ENABLED 1392 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1394 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned integer as decimal text into outStr
// (buffer of strLen bytes); output is truncated, never overflowed.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    // Cast to unsigned long long so the value matches "%llu" exactly.
    snprintf(outStr, strLen, "%llu", (unsigned long long)num);
}
// Formats a pointer value into outStr (buffer of strLen bytes) using
// the platform's "%p" representation; truncates rather than overflows.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1412 void Lock() { m_Mutex.lock(); }
1413 void Unlock() { m_Mutex.unlock(); }
1417 #define VMA_MUTEX VmaMutex 1428 #ifndef VMA_ATOMIC_UINT32 1429 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1432 #ifndef VMA_BEST_FIT 1445 #define VMA_BEST_FIT (1) 1448 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1453 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1456 #ifndef VMA_DEBUG_ALIGNMENT 1461 #define VMA_DEBUG_ALIGNMENT (1) 1464 #ifndef VMA_DEBUG_MARGIN 1469 #define VMA_DEBUG_MARGIN (0) 1472 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1477 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1480 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1485 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1488 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1489 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1493 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1494 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1498 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1499 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1503 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1509 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1510 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count),
// computed branchlessly via the classic parallel bit-summing trick.
// NOTE(review): the trailing `return c;` was missing in the extracted
// text; a value-returning function requires it — restored here.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);      // 2-bit sums
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // 4-bit sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                // 8-bit sums
    c = ((c >> 8) + c) & 0x00FF00FF;                // 16-bit sums
    c = ((c >> 16) + c) & 0x0000FFFF;               // final 32-bit sum
    return c;
}
// Rounds val up to the nearest multiple of align.
// Works for any positive align (not only powers of two), because it
// uses divide-then-multiply rather than bit masking.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Integer division of x by y rounded to the nearest whole number
// (adds half the divisor before dividing).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Partition step for VmaQuickSort: the pivot is the last element of
// [beg, end). Elements for which cmp(elem, pivot) holds are moved to the
// front; returns the iterator of the pivot's final position.
// NOTE(review): the `++insertIndex;` after the swap and the trailing
// `return insertIndex;` were dropped in the extracted text — without them
// the partition is meaningless and the function has no return; restored.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1563 template<
typename Iterator,
typename Compare>
1564 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1568 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1569 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1570 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1574 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1576 #endif // #ifndef VMA_SORT 1585 static inline bool VmaBlocksOnSamePage(
1586 VkDeviceSize resourceAOffset,
1587 VkDeviceSize resourceASize,
1588 VkDeviceSize resourceBOffset,
1589 VkDeviceSize pageSize)
1591 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1592 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1593 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1594 VkDeviceSize resourceBStart = resourceBOffset;
1595 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1596 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation — used to decide whether two
// neighboring suballocations conflict w.r.t. bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource kind not known
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // force 32-bit storage
};
1616 static inline bool VmaIsBufferImageGranularityConflict(
1617 VmaSuballocationType suballocType1,
1618 VmaSuballocationType suballocType2)
1620 if(suballocType1 > suballocType2)
1622 VMA_SWAP(suballocType1, suballocType2);
1625 switch(suballocType1)
1627 case VMA_SUBALLOCATION_TYPE_FREE:
1629 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1631 case VMA_SUBALLOCATION_TYPE_BUFFER:
1633 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1634 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1635 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1637 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1638 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1639 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1640 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1642 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1643 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1655 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1656 m_pMutex(useMutex ? &mutex : VMA_NULL)
1673 VMA_MUTEX* m_pMutex;
1676 #if VMA_DEBUG_GLOBAL_MUTEX 1677 static VMA_MUTEX gDebugGlobalMutex;
1678 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1680 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1684 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the
// first element NOT less than key (lower bound), or end if none exists.
// NOTE(review): the loop condition, branch bodies and final return were
// dropped in the extracted text; restored as the standard lower-bound
// halving loop implied by the visible down/up/mid/cmp skeleton.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // *mid < key: answer is to the right
        }
        else
        {
            up = mid;       // *mid >= key: mid may be the answer
        }
    }
    return beg + down;
}
1717 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1719 if((pAllocationCallbacks != VMA_NULL) &&
1720 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1722 return (*pAllocationCallbacks->pfnAllocation)(
1723 pAllocationCallbacks->pUserData,
1726 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1730 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1734 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1736 if((pAllocationCallbacks != VMA_NULL) &&
1737 (pAllocationCallbacks->pfnFree != VMA_NULL))
1739 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1743 VMA_SYSTEM_FREE(ptr);
1747 template<
typename T>
1748 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1750 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1753 template<
typename T>
1754 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1756 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1759 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1761 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1763 template<
typename T>
1764 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1767 VmaFree(pAllocationCallbacks, ptr);
1770 template<
typename T>
1771 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1775 for(
size_t i = count; i--; )
1779 VmaFree(pAllocationCallbacks, ptr);
1784 template<
typename T>
1785 class VmaStlAllocator
1788 const VkAllocationCallbacks*
const m_pCallbacks;
1789 typedef T value_type;
1791 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1792 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1794 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1795 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1797 template<
typename U>
1798 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1800 return m_pCallbacks == rhs.m_pCallbacks;
1802 template<
typename U>
1803 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1805 return m_pCallbacks != rhs.m_pCallbacks;
1808 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1811 #if VMA_USE_STL_VECTOR 1813 #define VmaVector std::vector 1815 template<
typename T,
typename allocatorT>
1816 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1818 vec.insert(vec.begin() + index, item);
// Erases the element at the given index from a std::vector.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
1827 #else // #if VMA_USE_STL_VECTOR 1832 template<
typename T,
typename AllocatorT>
1836 typedef T value_type;
1838 VmaVector(
const AllocatorT& allocator) :
1839 m_Allocator(allocator),
1846 VmaVector(
size_t count,
const AllocatorT& allocator) :
1847 m_Allocator(allocator),
1848 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1854 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1855 m_Allocator(src.m_Allocator),
1856 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1857 m_Count(src.m_Count),
1858 m_Capacity(src.m_Count)
1862 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1868 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1871 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1875 resize(rhs.m_Count);
1878 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1884 bool empty()
const {
return m_Count == 0; }
1885 size_t size()
const {
return m_Count; }
1886 T* data() {
return m_pArray; }
1887 const T* data()
const {
return m_pArray; }
1889 T& operator[](
size_t index)
1891 VMA_HEAVY_ASSERT(index < m_Count);
1892 return m_pArray[index];
1894 const T& operator[](
size_t index)
const 1896 VMA_HEAVY_ASSERT(index < m_Count);
1897 return m_pArray[index];
1902 VMA_HEAVY_ASSERT(m_Count > 0);
1905 const T& front()
const 1907 VMA_HEAVY_ASSERT(m_Count > 0);
1912 VMA_HEAVY_ASSERT(m_Count > 0);
1913 return m_pArray[m_Count - 1];
1915 const T& back()
const 1917 VMA_HEAVY_ASSERT(m_Count > 0);
1918 return m_pArray[m_Count - 1];
1921 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1923 newCapacity = VMA_MAX(newCapacity, m_Count);
1925 if((newCapacity < m_Capacity) && !freeMemory)
1927 newCapacity = m_Capacity;
1930 if(newCapacity != m_Capacity)
1932 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1935 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1937 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1938 m_Capacity = newCapacity;
1939 m_pArray = newArray;
1943 void resize(
size_t newCount,
bool freeMemory =
false)
1945 size_t newCapacity = m_Capacity;
1946 if(newCount > m_Capacity)
1948 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1952 newCapacity = newCount;
1955 if(newCapacity != m_Capacity)
1957 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1958 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1959 if(elementsToCopy != 0)
1961 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1963 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1964 m_Capacity = newCapacity;
1965 m_pArray = newArray;
1971 void clear(
bool freeMemory =
false)
1973 resize(0, freeMemory);
1976 void insert(
size_t index,
const T& src)
1978 VMA_HEAVY_ASSERT(index <= m_Count);
1979 const size_t oldCount = size();
1980 resize(oldCount + 1);
1981 if(index < oldCount)
1983 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1985 m_pArray[index] = src;
1988 void remove(
size_t index)
1990 VMA_HEAVY_ASSERT(index < m_Count);
1991 const size_t oldCount = size();
1992 if(index < oldCount - 1)
1994 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1996 resize(oldCount - 1);
1999 void push_back(
const T& src)
2001 const size_t newIndex = size();
2002 resize(newIndex + 1);
2003 m_pArray[newIndex] = src;
2008 VMA_HEAVY_ASSERT(m_Count > 0);
2012 void push_front(
const T& src)
2019 VMA_HEAVY_ASSERT(m_Count > 0);
2023 typedef T* iterator;
2025 iterator begin() {
return m_pArray; }
2026 iterator end() {
return m_pArray + m_Count; }
2029 AllocatorT m_Allocator;
2035 template<
typename T,
typename allocatorT>
2036 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2038 vec.insert(index, item);
2041 template<
typename T,
typename allocatorT>
2042 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2047 #endif // #if VMA_USE_STL_VECTOR 2049 template<
typename CmpLess,
typename VectorT>
2050 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2052 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2054 vector.data() + vector.size(),
2056 CmpLess()) - vector.data();
2057 VmaVectorInsert(vector, indexToInsert, value);
2058 return indexToInsert;
2061 template<
typename CmpLess,
typename VectorT>
2062 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2065 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2070 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2072 size_t indexToRemove = it - vector.begin();
2073 VmaVectorRemove(vector, indexToRemove);
2079 template<
typename CmpLess,
typename VectorT>
2080 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2083 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2085 vector.data() + vector.size(),
2088 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2090 return it - vector.begin();
2094 return vector.size();
2106 template<
typename T>
2107 class VmaPoolAllocator
2110 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2111 ~VmaPoolAllocator();
2119 uint32_t NextFreeIndex;
2126 uint32_t FirstFreeIndex;
2129 const VkAllocationCallbacks* m_pAllocationCallbacks;
2130 size_t m_ItemsPerBlock;
2131 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2133 ItemBlock& CreateNewBlock();
2136 template<
typename T>
2137 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2138 m_pAllocationCallbacks(pAllocationCallbacks),
2139 m_ItemsPerBlock(itemsPerBlock),
2140 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2142 VMA_ASSERT(itemsPerBlock > 0);
2145 template<
typename T>
2146 VmaPoolAllocator<T>::~VmaPoolAllocator()
2151 template<
typename T>
2152 void VmaPoolAllocator<T>::Clear()
2154 for(
size_t i = m_ItemBlocks.size(); i--; )
2155 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2156 m_ItemBlocks.clear();
2159 template<
typename T>
2160 T* VmaPoolAllocator<T>::Alloc()
2162 for(
size_t i = m_ItemBlocks.size(); i--; )
2164 ItemBlock& block = m_ItemBlocks[i];
2166 if(block.FirstFreeIndex != UINT32_MAX)
2168 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2169 block.FirstFreeIndex = pItem->NextFreeIndex;
2170 return &pItem->Value;
2175 ItemBlock& newBlock = CreateNewBlock();
2176 Item*
const pItem = &newBlock.pItems[0];
2177 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2178 return &pItem->Value;
2181 template<
typename T>
2182 void VmaPoolAllocator<T>::Free(T* ptr)
2185 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2187 ItemBlock& block = m_ItemBlocks[i];
2191 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2194 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2196 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2197 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2198 block.FirstFreeIndex = index;
2202 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2205 template<
typename T>
2206 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2208 ItemBlock newBlock = {
2209 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2211 m_ItemBlocks.push_back(newBlock);
2214 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2215 newBlock.pItems[i].NextFreeIndex = i + 1;
2216 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2217 return m_ItemBlocks.back();
2223 #if VMA_USE_STL_LIST 2225 #define VmaList std::list 2227 #else // #if VMA_USE_STL_LIST 2229 template<
typename T>
2238 template<
typename T>
2242 typedef VmaListItem<T> ItemType;
2244 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2248 size_t GetCount()
const {
return m_Count; }
2249 bool IsEmpty()
const {
return m_Count == 0; }
2251 ItemType* Front() {
return m_pFront; }
2252 const ItemType* Front()
const {
return m_pFront; }
2253 ItemType* Back() {
return m_pBack; }
2254 const ItemType* Back()
const {
return m_pBack; }
2256 ItemType* PushBack();
2257 ItemType* PushFront();
2258 ItemType* PushBack(
const T& value);
2259 ItemType* PushFront(
const T& value);
2264 ItemType* InsertBefore(ItemType* pItem);
2266 ItemType* InsertAfter(ItemType* pItem);
2268 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2269 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2271 void Remove(ItemType* pItem);
2274 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2275 VmaPoolAllocator<ItemType> m_ItemAllocator;
2281 VmaRawList(
const VmaRawList<T>& src);
2282 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2285 template<
typename T>
2286 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2287 m_pAllocationCallbacks(pAllocationCallbacks),
2288 m_ItemAllocator(pAllocationCallbacks, 128),
2295 template<
typename T>
2296 VmaRawList<T>::~VmaRawList()
2302 template<
typename T>
2303 void VmaRawList<T>::Clear()
2305 if(IsEmpty() ==
false)
2307 ItemType* pItem = m_pBack;
2308 while(pItem != VMA_NULL)
2310 ItemType*
const pPrevItem = pItem->pPrev;
2311 m_ItemAllocator.Free(pItem);
2314 m_pFront = VMA_NULL;
2320 template<
typename T>
2321 VmaListItem<T>* VmaRawList<T>::PushBack()
2323 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2324 pNewItem->pNext = VMA_NULL;
2327 pNewItem->pPrev = VMA_NULL;
2328 m_pFront = pNewItem;
2334 pNewItem->pPrev = m_pBack;
2335 m_pBack->pNext = pNewItem;
2342 template<
typename T>
2343 VmaListItem<T>* VmaRawList<T>::PushFront()
2345 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2346 pNewItem->pPrev = VMA_NULL;
2349 pNewItem->pNext = VMA_NULL;
2350 m_pFront = pNewItem;
2356 pNewItem->pNext = m_pFront;
2357 m_pFront->pPrev = pNewItem;
2358 m_pFront = pNewItem;
2364 template<
typename T>
2365 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2367 ItemType*
const pNewItem = PushBack();
2368 pNewItem->Value = value;
2372 template<
typename T>
2373 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2375 ItemType*
const pNewItem = PushFront();
2376 pNewItem->Value = value;
2380 template<
typename T>
2381 void VmaRawList<T>::PopBack()
2383 VMA_HEAVY_ASSERT(m_Count > 0);
2384 ItemType*
const pBackItem = m_pBack;
2385 ItemType*
const pPrevItem = pBackItem->pPrev;
2386 if(pPrevItem != VMA_NULL)
2388 pPrevItem->pNext = VMA_NULL;
2390 m_pBack = pPrevItem;
2391 m_ItemAllocator.Free(pBackItem);
2395 template<
typename T>
2396 void VmaRawList<T>::PopFront()
2398 VMA_HEAVY_ASSERT(m_Count > 0);
2399 ItemType*
const pFrontItem = m_pFront;
2400 ItemType*
const pNextItem = pFrontItem->pNext;
2401 if(pNextItem != VMA_NULL)
2403 pNextItem->pPrev = VMA_NULL;
2405 m_pFront = pNextItem;
2406 m_ItemAllocator.Free(pFrontItem);
2410 template<
typename T>
2411 void VmaRawList<T>::Remove(ItemType* pItem)
2413 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2414 VMA_HEAVY_ASSERT(m_Count > 0);
2416 if(pItem->pPrev != VMA_NULL)
2418 pItem->pPrev->pNext = pItem->pNext;
2422 VMA_HEAVY_ASSERT(m_pFront == pItem);
2423 m_pFront = pItem->pNext;
2426 if(pItem->pNext != VMA_NULL)
2428 pItem->pNext->pPrev = pItem->pPrev;
2432 VMA_HEAVY_ASSERT(m_pBack == pItem);
2433 m_pBack = pItem->pPrev;
2436 m_ItemAllocator.Free(pItem);
2440 template<
typename T>
2441 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2443 if(pItem != VMA_NULL)
2445 ItemType*
const prevItem = pItem->pPrev;
2446 ItemType*
const newItem = m_ItemAllocator.Alloc();
2447 newItem->pPrev = prevItem;
2448 newItem->pNext = pItem;
2449 pItem->pPrev = newItem;
2450 if(prevItem != VMA_NULL)
2452 prevItem->pNext = newItem;
2456 VMA_HEAVY_ASSERT(m_pFront == pItem);
2466 template<
typename T>
2467 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2469 if(pItem != VMA_NULL)
2471 ItemType*
const nextItem = pItem->pNext;
2472 ItemType*
const newItem = m_ItemAllocator.Alloc();
2473 newItem->pNext = nextItem;
2474 newItem->pPrev = pItem;
2475 pItem->pNext = newItem;
2476 if(nextItem != VMA_NULL)
2478 nextItem->pPrev = newItem;
2482 VMA_HEAVY_ASSERT(m_pBack == pItem);
2492 template<
typename T>
2493 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2495 ItemType*
const newItem = InsertBefore(pItem);
2496 newItem->Value = value;
2500 template<
typename T>
2501 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2503 ItemType*
const newItem = InsertAfter(pItem);
2504 newItem->Value = value;
2508 template<
typename T,
typename AllocatorT>
2521 T& operator*()
const 2523 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2524 return m_pItem->Value;
2526 T* operator->()
const 2528 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2529 return &m_pItem->Value;
2532 iterator& operator++()
2534 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2535 m_pItem = m_pItem->pNext;
2538 iterator& operator--()
2540 if(m_pItem != VMA_NULL)
2542 m_pItem = m_pItem->pPrev;
2546 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2547 m_pItem = m_pList->Back();
2552 iterator operator++(
int)
2554 iterator result = *
this;
2558 iterator operator--(
int)
2560 iterator result = *
this;
2565 bool operator==(
const iterator& rhs)
const 2567 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2568 return m_pItem == rhs.m_pItem;
2570 bool operator!=(
const iterator& rhs)
const 2572 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2573 return m_pItem != rhs.m_pItem;
2577 VmaRawList<T>* m_pList;
2578 VmaListItem<T>* m_pItem;
2580 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2586 friend class VmaList<T, AllocatorT>;
2589 class const_iterator
2598 const_iterator(
const iterator& src) :
2599 m_pList(src.m_pList),
2600 m_pItem(src.m_pItem)
2604 const T& operator*()
const 2606 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2607 return m_pItem->Value;
2609 const T* operator->()
const 2611 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2612 return &m_pItem->Value;
2615 const_iterator& operator++()
2617 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2618 m_pItem = m_pItem->pNext;
2621 const_iterator& operator--()
2623 if(m_pItem != VMA_NULL)
2625 m_pItem = m_pItem->pPrev;
2629 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2630 m_pItem = m_pList->Back();
2635 const_iterator operator++(
int)
2637 const_iterator result = *
this;
2641 const_iterator operator--(
int)
2643 const_iterator result = *
this;
2648 bool operator==(
const const_iterator& rhs)
const 2650 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2651 return m_pItem == rhs.m_pItem;
2653 bool operator!=(
const const_iterator& rhs)
const 2655 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2656 return m_pItem != rhs.m_pItem;
2660 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2666 const VmaRawList<T>* m_pList;
2667 const VmaListItem<T>* m_pItem;
2669 friend class VmaList<T, AllocatorT>;
2672 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2674 bool empty()
const {
return m_RawList.IsEmpty(); }
2675 size_t size()
const {
return m_RawList.GetCount(); }
2677 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2678 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2680 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2681 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2683 void clear() { m_RawList.Clear(); }
2684 void push_back(
const T& value) { m_RawList.PushBack(value); }
2685 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2686 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2689 VmaRawList<T> m_RawList;
2692 #endif // #if VMA_USE_STL_LIST 2700 #if VMA_USE_STL_UNORDERED_MAP 2702 #define VmaPair std::pair 2704 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2705 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2707 #else // #if VMA_USE_STL_UNORDERED_MAP 2709 template<
typename T1,
typename T2>
2715 VmaPair() : first(), second() { }
2716 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2722 template<
typename KeyT,
typename ValueT>
2726 typedef VmaPair<KeyT, ValueT> PairType;
2727 typedef PairType* iterator;
2729 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2731 iterator begin() {
return m_Vector.begin(); }
2732 iterator end() {
return m_Vector.end(); }
2734 void insert(
const PairType& pair);
2735 iterator find(
const KeyT& key);
2736 void erase(iterator it);
2739 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2742 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2744 template<
typename FirstT,
typename SecondT>
2745 struct VmaPairFirstLess
2747 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2749 return lhs.first < rhs.first;
2751 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2753 return lhs.first < rhsFirst;
2757 template<
typename KeyT,
typename ValueT>
2758 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2760 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2762 m_Vector.data() + m_Vector.size(),
2764 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2765 VmaVectorInsert(m_Vector, indexToInsert, pair);
2768 template<
typename KeyT,
typename ValueT>
2769 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2771 PairType* it = VmaBinaryFindFirstNotLess(
2773 m_Vector.data() + m_Vector.size(),
2775 VmaPairFirstLess<KeyT, ValueT>());
2776 if((it != m_Vector.end()) && (it->first == key))
2782 return m_Vector.end();
2786 template<
typename KeyT,
typename ValueT>
2787 void VmaMap<KeyT, ValueT>::erase(iterator it)
2789 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2792 #endif // #if VMA_USE_STL_UNORDERED_MAP 2798 class VmaDeviceMemoryBlock;
2800 enum VMA_BLOCK_VECTOR_TYPE
2802 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2803 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2804 VMA_BLOCK_VECTOR_TYPE_COUNT
2810 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2811 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2814 struct VmaAllocation_T
2817 enum ALLOCATION_TYPE
2819 ALLOCATION_TYPE_NONE,
2820 ALLOCATION_TYPE_BLOCK,
2821 ALLOCATION_TYPE_OWN,
2824 VmaAllocation_T(uint32_t currentFrameIndex) :
2827 m_pUserData(VMA_NULL),
2828 m_Type(ALLOCATION_TYPE_NONE),
2829 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2830 m_LastUseFrameIndex(currentFrameIndex)
2834 void InitBlockAllocation(
2836 VmaDeviceMemoryBlock* block,
2837 VkDeviceSize offset,
2838 VkDeviceSize alignment,
2840 VmaSuballocationType suballocationType,
2844 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2845 VMA_ASSERT(block != VMA_NULL);
2846 m_Type = ALLOCATION_TYPE_BLOCK;
2847 m_Alignment = alignment;
2849 m_pUserData = pUserData;
2850 m_SuballocationType = suballocationType;
2851 m_BlockAllocation.m_hPool = hPool;
2852 m_BlockAllocation.m_Block = block;
2853 m_BlockAllocation.m_Offset = offset;
2854 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2859 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2860 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2861 m_Type = ALLOCATION_TYPE_BLOCK;
2862 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2863 m_BlockAllocation.m_Block = VMA_NULL;
2864 m_BlockAllocation.m_Offset = 0;
2865 m_BlockAllocation.m_CanBecomeLost =
true;
2868 void ChangeBlockAllocation(
2869 VmaDeviceMemoryBlock* block,
2870 VkDeviceSize offset)
2872 VMA_ASSERT(block != VMA_NULL);
2873 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2874 m_BlockAllocation.m_Block = block;
2875 m_BlockAllocation.m_Offset = offset;
2878 void InitOwnAllocation(
2879 uint32_t memoryTypeIndex,
2880 VkDeviceMemory hMemory,
2881 VmaSuballocationType suballocationType,
2887 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2888 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2889 m_Type = ALLOCATION_TYPE_OWN;
2892 m_pUserData = pUserData;
2893 m_SuballocationType = suballocationType;
2894 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2895 m_OwnAllocation.m_hMemory = hMemory;
2896 m_OwnAllocation.m_PersistentMap = persistentMap;
2897 m_OwnAllocation.m_pMappedData = pMappedData;
// Simple inline accessors for VmaAllocation_T state.
// NOTE(review): lossy extraction — some braces/lines dropped; code untouched.
2900 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2901 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2902 VkDeviceSize GetSize()
const {
return m_Size; }
2903 void* GetUserData()
const {
return m_pUserData; }
2904 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2905 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }

// Block accessor: only valid for block-backed allocations.
2907 VmaDeviceMemoryBlock* GetBlock()
const 2909 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2910 return m_BlockAllocation.m_Block;

// Out-of-line accessors (defined later in the file) that dispatch on m_Type.
2912 VkDeviceSize GetOffset()
const;
2913 VkDeviceMemory GetMemory()
const;
2914 uint32_t GetMemoryTypeIndex()
const;
2915 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2916 void* GetMappedData()
const;
2917 bool CanBecomeLost()
const;
2918 VmaPool GetPool()
const;

// Persistent-mapping helpers for dedicated ("own") allocations.
2920 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2921 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);

// Atomic last-use frame index: read, and CAS used by the lost-allocation
// algorithm (compare_exchange_weak may fail spuriously; callers loop).
2923 uint32_t GetLastUseFrameIndex()
const 2925 return m_LastUseFrameIndex.load();
2927 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2929 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);

// Attempts to mark this allocation lost; definition appears later in the file.
2939 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

// (fragment of an own-allocation-only accessor; surrounding lines dropped)
2943 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);

// Scalar members shared by both allocation kinds.
2955 VkDeviceSize m_Alignment;
2956 VkDeviceSize m_Size;
2958 ALLOCATION_TYPE m_Type;
2959 VmaSuballocationType m_SuballocationType;
2960 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// Per-kind payload of VmaAllocation_T (presumably members of a union in the
// original — TODO confirm, the discriminating lines were dropped).
2963 struct BlockAllocation
2966 VmaDeviceMemoryBlock* m_Block;
2967 VkDeviceSize m_Offset;
2968 bool m_CanBecomeLost;
2972 struct OwnAllocation
2974 uint32_t m_MemoryTypeIndex;
2975 VkDeviceMemory m_hMemory;
2976 bool m_PersistentMap;
2977 void* m_pMappedData;
2983 BlockAllocation m_BlockAllocation;
2985 OwnAllocation m_OwnAllocation;

// One region inside a VmaDeviceMemoryBlock: either free
// (type == VMA_SUBALLOCATION_TYPE_FREE, hAllocation null) or used.
2993 struct VmaSuballocation
2995 VkDeviceSize offset;
2997 VmaAllocation hAllocation;
2998 VmaSuballocationType type;
3001 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost (in bytes) charged per allocation that would have to be made lost —
// 1 MiB, used by VmaAllocationRequest::CalcCost below.
3004 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

// Candidate placement for a new allocation inside a block, plus the cost of
// making other allocations lost to use it.
3019 struct VmaAllocationRequest
3021 VkDeviceSize offset;
3022 VkDeviceSize sumFreeSize;
3023 VkDeviceSize sumItemSize;
3024 VmaSuballocationList::iterator item;
3025 size_t itemsToMakeLostCount;
// Cost = bytes of allocations that would be destroyed, plus a fixed penalty
// per lost allocation.
3027 VkDeviceSize CalcCost()
const 3029 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// One VkDeviceMemory block sub-allocated via a doubly-linked suballocation
// list plus a by-size index of free ranges.
// NOTE(review): lossy extraction — access specifiers/braces dropped.
3039 class VmaDeviceMemoryBlock
3042 uint32_t m_MemoryTypeIndex;
3043 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3044 VkDeviceMemory m_hMemory;
3045 VkDeviceSize m_Size;
3046 bool m_PersistentMap;
3047 void* m_pMappedData;
3048 uint32_t m_FreeCount;
3049 VkDeviceSize m_SumFreeSize;
// All suballocations, ordered by offset; free ones also referenced by the
// size-sorted vector below for binary search in CreateAllocationRequest.
3050 VmaSuballocationList m_Suballocations;
3053 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3055 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor asserts Destroy() was called first (memory already released).
3057 ~VmaDeviceMemoryBlock()
3059 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// (Init) Takes ownership of newMemory and sets up one big free suballocation.
3064 uint32_t newMemoryTypeIndex,
3065 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3066 VkDeviceMemory newMemory,
3067 VkDeviceSize newSize,
// Unmaps (if mapped) and frees the underlying VkDeviceMemory.
3071 void Destroy(VmaAllocator allocator);
// Consistency check of the suballocation bookkeeping; debug use.
3074 bool Validate()
const;
// Finds a place for an allocation; may propose making others lost when
// canMakeOtherLost is true.
3079 bool CreateAllocationRequest(
3080 uint32_t currentFrameIndex,
3081 uint32_t frameInUseCount,
3082 VkDeviceSize bufferImageGranularity,
3083 VkDeviceSize allocSize,
3084 VkDeviceSize allocAlignment,
3085 VmaSuballocationType allocType,
3086 bool canMakeOtherLost,
3087 VmaAllocationRequest* pAllocationRequest);
3089 bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
3091 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3094 bool IsEmpty()
const;
// (Alloc) Commits a previously computed request for hAllocation.
3099 const VmaAllocationRequest& request,
3100 VmaSuballocationType type,
3101 VkDeviceSize allocSize,
3102 VmaAllocation hAllocation);
3105 void Free(
const VmaAllocation allocation);
3107 #if VMA_STATS_STRING_ENABLED 3108 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Core fit test used by CreateAllocationRequest: checks alignment, margins and
// bufferImageGranularity conflicts starting at suballocItem; outputs offset
// and the cost of allocations that would need to become lost.
3114 bool CheckAllocation(
3115 uint32_t currentFrameIndex,
3116 uint32_t frameInUseCount,
3117 VkDeviceSize bufferImageGranularity,
3118 VkDeviceSize allocSize,
3119 VkDeviceSize allocAlignment,
3120 VmaSuballocationType allocType,
3121 VmaSuballocationList::const_iterator suballocItem,
3122 bool canMakeOtherLost,
3123 VkDeviceSize* pOffset,
3124 size_t* itemsToMakeLostCount,
3125 VkDeviceSize* pSumFreeSize,
3126 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
3129 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3133 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3136 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3139 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3141 bool ValidateFreeSuballocationList()
const;
// Ordering functor comparing raw pointers (body line dropped by extraction —
// presumably `return lhs < rhs;`, TODO confirm against upstream).
3144 struct VmaPointerLess
3146 bool operator()(
const void* lhs,
const void* rhs)
const 3152 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock of one memory type / block-vector type,
// either default (allocator-owned) or belonging to a custom VmaPool.
// NOTE(review): lossy extraction — ctor name line and braces dropped.
3160 struct VmaBlockVector
3163 VmaAllocator hAllocator,
3164 uint32_t memoryTypeIndex,
3165 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3166 VkDeviceSize preferredBlockSize,
3167 size_t minBlockCount,
3168 size_t maxBlockCount,
3169 VkDeviceSize bufferImageGranularity,
3170 uint32_t frameInUseCount,
3174 VkResult CreateMinBlocks();
// Trivial const getters over the immutable configuration below.
3176 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3177 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3178 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3179 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3180 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3184 bool IsEmpty()
const {
return m_Blocks.empty(); }
// (Allocate) Tries existing blocks, then creates a new one up to maxBlockCount.
3187 VmaPool hCurrentPool,
3188 uint32_t currentFrameIndex,
3189 const VkMemoryRequirements& vkMemReq,
3191 VmaSuballocationType suballocType,
3192 VmaAllocation* pAllocation);
3195 VmaAllocation hAllocation);
3200 #if VMA_STATS_STRING_ENABLED 3201 void PrintDetailedMap(
class VmaJsonWriter& json);
3204 void UnmapPersistentlyMappedMemory();
3205 VkResult MapPersistentlyMappedMemory();
3207 void MakePoolAllocationsLost(
3208 uint32_t currentFrameIndex,
3209 size_t* pLostAllocationCount);
// Lazily creates the defragmentator bound to this vector.
3211 VmaDefragmentator* EnsureDefragmentator(
3212 VmaAllocator hAllocator,
3213 uint32_t currentFrameIndex);
3215 VkResult Defragment(
3217 VkDeviceSize& maxBytesToMove,
3218 uint32_t& maxAllocationsToMove);
3220 void DestroyDefragmentator();
3223 friend class VmaDefragmentator;
// Immutable configuration set at construction.
3225 const VmaAllocator m_hAllocator;
3226 const uint32_t m_MemoryTypeIndex;
3227 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3228 const VkDeviceSize m_PreferredBlockSize;
3229 const size_t m_MinBlockCount;
3230 const size_t m_MaxBlockCount;
3231 const VkDeviceSize m_BufferImageGranularity;
3232 const uint32_t m_FrameInUseCount;
3233 const bool m_IsCustomPool;
// Blocks kept sorted incrementally (see IncrementallySortBlocks).
3236 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3240 bool m_HasEmptyBlock;
3241 VmaDefragmentator* m_pDefragmentator;
3244 void Remove(VmaDeviceMemoryBlock* pBlock);
3248 void IncrementallySortBlocks();
3250 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Interior of VmaPool_T (class header outside this chunk): a custom pool is
// essentially one owned VmaBlockVector.
3256 VmaBlockVector m_BlockVector;
3260 VmaAllocator hAllocator,
3264 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves movable allocations between blocks of one VmaBlockVector to compact
// memory, within caller-supplied byte/count budgets.
// NOTE(review): lossy extraction — braces, return statements of comparators,
// and some declarations dropped; code left untouched.
3266 #if VMA_STATS_STRING_ENABLED 3271 class VmaDefragmentator
3273 const VmaAllocator m_hAllocator;
3274 VmaBlockVector*
const m_pBlockVector;
3275 uint32_t m_CurrentFrameIndex;
3276 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Totals reported via GetBytesMoved / GetAllocationsMoved.
3277 VkDeviceSize m_BytesMoved;
3278 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged (optional)
// receives VK_TRUE when the allocation was actually moved.
3280 struct AllocationInfo
3282 VmaAllocation m_hAllocation;
3283 VkBool32* m_pChanged;
3286 m_hAllocation(VK_NULL_HANDLE),
3287 m_pChanged(VMA_NULL)
// Sort key: larger allocations first.
3292 struct AllocationInfoSizeGreater
3294 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3296 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3301 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (struct BlockInfo; header line dropped).
3305 VmaDeviceMemoryBlock* m_pBlock;
3306 bool m_HasNonMovableAllocations;
3307 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3309 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3311 m_HasNonMovableAllocations(true),
3312 m_Allocations(pAllocationCallbacks),
3313 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not all of its live suballocations
// were registered with the defragmentator.
3317 void CalcHasNonMovableAllocations()
3319 const size_t blockAllocCount =
3320 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3321 const size_t defragmentAllocCount = m_Allocations.size();
3322 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic: "Descecnding" typo is upstream's identifier; renaming would break callers.)
3325 void SortAllocationsBySizeDescecnding()
3327 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3330 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3331 void Unmap(VmaAllocator hAllocator);
3335 void* m_pMappedDataForDefragmentation;
// Comparators for binary search / sorting of BlockInfo by block pointer.
3338 struct BlockPointerLess
3340 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3342 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3344 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3346 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination preference: fully-movable blocks first, then by free space
// (return statements dropped by extraction).
3352 struct BlockInfoCompareMoveDestination
3354 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3356 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3360 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3364 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3372 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3373 BlockInfoVector m_Blocks;
3375 VkResult DefragmentRound(
3376 VkDeviceSize maxBytesToMove,
3377 uint32_t maxAllocationsToMove);
// Heuristic: is moving from (srcBlock, srcOffset) to (dstBlock, dstOffset)
// an improvement?
3379 static bool MoveMakesSense(
3380 size_t dstBlockIndex, VkDeviceSize dstOffset,
3381 size_t srcBlockIndex, VkDeviceSize srcOffset);
// (constructor; name line dropped)
3385 VmaAllocator hAllocator,
3386 VmaBlockVector* pBlockVector,
3387 uint32_t currentFrameIndex);
3389 ~VmaDefragmentator();
3391 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3392 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3394 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3396 VkResult Defragment(
3397 VkDeviceSize maxBytesToMove,
3398 uint32_t maxAllocationsToMove);
// Main allocator object behind the VmaAllocator handle: per-memory-type block
// vectors, dedicated ("own") allocation lists, pools, and frame tracking.
// NOTE(review): lossy extraction — several member/parameter lines dropped.
3402 struct VmaAllocator_T
3406 bool m_AllocationCallbacksSpecified;
3407 VkAllocationCallbacks m_AllocationCallbacks;
// Nonzero while persistently mapped memory is globally unmapped.
3411 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
// Optional per-heap size limits, guarded by their own mutex.
3414 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3415 VMA_MUTEX m_HeapSizeLimitMutex;
3417 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3418 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors: [memory type][mapped/unmapped].
3421 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
// Dedicated allocations, same indexing, one mutex per memory type.
3424 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3425 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3426 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied, else null (Vulkan default allocator).
3431 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3433 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3437 return m_VulkanFunctions;
// Effective granularity: max of device limit and debug override (VMA_MAX
// call line dropped by extraction).
3440 VkDeviceSize GetBufferImageGranularity()
const 3443 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3444 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3447 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3448 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3450 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3452 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3453 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Main entry point behind vmaAllocateMemory*.
3457 VkResult AllocateMemory(
3458 const VkMemoryRequirements& vkMemReq,
3460 VmaSuballocationType suballocType,
3461 VmaAllocation* pAllocation);
3464 void FreeMemory(
const VmaAllocation allocation);
3466 void CalculateStats(
VmaStats* pStats);
3468 #if VMA_STATS_STRING_ENABLED 3469 void PrintDetailedMap(
class VmaJsonWriter& json);
3472 void UnmapPersistentlyMappedMemory();
3473 VkResult MapPersistentlyMappedMemory();
3475 VkResult Defragment(
3476 VmaAllocation* pAllocations,
3477 size_t allocationCount,
3478 VkBool32* pAllocationsChanged,
3482 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3485 void DestroyPool(VmaPool pool);
3486 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3488 void SetCurrentFrameIndex(uint32_t frameIndex);
3490 void MakePoolAllocationsLost(
3492 size_t* pLostAllocationCount);
3494 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also apply heap limits.
3496 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3497 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3500 VkDeviceSize m_PreferredLargeHeapBlockSize;
3501 VkDeviceSize m_PreferredSmallHeapBlockSize;
3503 VkPhysicalDevice m_PhysicalDevice;
3504 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3506 VMA_MUTEX m_PoolsMutex;
3508 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3514 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation for one concrete memory type; falls back to own allocation.
3516 VkResult AllocateMemoryOfType(
3517 const VkMemoryRequirements& vkMemReq,
3519 uint32_t memTypeIndex,
3520 VmaSuballocationType suballocType,
3521 VmaAllocation* pAllocation);
3524 VkResult AllocateOwnMemory(
3526 VmaSuballocationType suballocType,
3527 uint32_t memTypeIndex,
3530 VmaAllocation* pAllocation);
3533 void FreeOwnMemory(VmaAllocation allocation);
// Allocator-callback-aware heap helpers: forward to the callback-based
// VmaMalloc/VmaFree overloads defined earlier in the file.
3539 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3541 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3544 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3546 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (raw memory; construction lines, if any,
// are outside this view).
3549 template<
typename T>
3550 static T* VmaAllocate(VmaAllocator hAllocator)
3552 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3555 template<
typename T>
3556 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3558 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// NOTE(review): the explicit destructor call(s) that upstream performs before
// VmaFree appear to have been dropped by the extraction — TODO confirm; only
// the free itself survives below.
3561 template<
typename T>
3562 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3567 VmaFree(hAllocator, ptr);
3571 template<
typename T>
3572 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3576 for(
size_t i = count; i--; )
3578 VmaFree(hAllocator, ptr);
// Minimal append-only string builder over VmaVector<char>; the buffer is NOT
// NUL-terminated (GetLength/GetData expose raw bytes).
3585 #if VMA_STATS_STRING_ENABLED 3587 class VmaStringBuilder
3590 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3591 size_t GetLength()
const {
return m_Data.size(); }
3592 const char* GetData()
const {
return m_Data.data(); }
3594 void Add(
char ch) { m_Data.push_back(ch); }
3595 void Add(
const char* pStr);
3596 void AddNewLine() { Add(
'\n'); }
3597 void AddNumber(uint32_t num);
3598 void AddNumber(uint64_t num);
3599 void AddPointer(
const void* ptr);
3602 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by resize + memcpy (no terminator stored).
3605 void VmaStringBuilder::Add(
const char* pStr)
3607 const size_t strLen = strlen(pStr);
3610 const size_t oldCount = m_Data.size();
3611 m_Data.resize(oldCount + strLen);
3612 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer formatting into a stack buffer (buffer declaration and the
// final Add(buf) dropped by extraction), then appended as text.
3616 void VmaStringBuilder::AddNumber(uint32_t num)
3619 VmaUint32ToStr(buf,
sizeof(buf), num);
3623 void VmaStringBuilder::AddNumber(uint64_t num)
3626 VmaUint64ToStr(buf,
sizeof(buf), num);
3630 void VmaStringBuilder::AddPointer(
const void* ptr)
3633 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Objects/arrays are tracked on
// an explicit stack; inside an object, values alternate key (string) / value.
3637 #endif // #if VMA_STATS_STRING_ENABLED 3642 #if VMA_STATS_STRING_ENABLED 3647 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3650 void BeginObject(
bool singleLine =
false);
3653 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
3656 void WriteString(
const char* pStr);
3657 void BeginString(
const char* pStr = VMA_NULL);
3658 void ContinueString(
const char* pStr);
3659 void ContinueString(uint32_t n);
3660 void ContinueString(uint64_t n);
3661 void EndString(
const char* pStr = VMA_NULL);
3663 void WriteNumber(uint32_t n);
3664 void WriteNumber(uint64_t n);
3665 void WriteBool(
bool b);
// Indent unit used by WriteIndent (defined out of line below).
3669 static const char*
const INDENT;
3671 enum COLLECTION_TYPE
3673 COLLECTION_TYPE_OBJECT,
3674 COLLECTION_TYPE_ARRAY,
// StackItem: one open collection (struct header line dropped by extraction).
3678 COLLECTION_TYPE type;
3679 uint32_t valueCount;
3680 bool singleLineMode;
3683 VmaStringBuilder& m_SB;
3684 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3685 bool m_InsideString;
// Emits separators/indentation before a value; isString marks object keys.
3687 void BeginValue(
bool isString);
3688 void WriteIndent(
bool oneLess =
false);
3691 const char*
const VmaJsonWriter::INDENT =
" ";
3693 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3695 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3696 m_InsideString(false)
3700 VmaJsonWriter::~VmaJsonWriter()
3702 VMA_ASSERT(!m_InsideString);
3703 VMA_ASSERT(m_Stack.empty());
// Object/array/string emission. NOTE(review): lossy extraction — the lines
// that emit the actual delimiters ('{', '}', '[', ']', '"') and the character
// escaping switch in ContinueString were dropped; code left untouched.
3706 void VmaJsonWriter::BeginObject(
bool singleLine)
3708 VMA_ASSERT(!m_InsideString);
// Pushes a new object frame; singleLine suppresses indentation inside it.
3714 item.type = COLLECTION_TYPE_OBJECT;
3715 item.valueCount = 0;
3716 item.singleLineMode = singleLine;
3717 m_Stack.push_back(item);
3720 void VmaJsonWriter::EndObject()
3722 VMA_ASSERT(!m_InsideString);
// Must match the innermost open collection (and then pops it).
3727 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3731 void VmaJsonWriter::BeginArray(
bool singleLine)
3733 VMA_ASSERT(!m_InsideString);
3739 item.type = COLLECTION_TYPE_ARRAY;
3740 item.valueCount = 0;
3741 item.singleLineMode = singleLine;
3742 m_Stack.push_back(item);
3745 void VmaJsonWriter::EndArray()
3747 VMA_ASSERT(!m_InsideString);
3752 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: writes a complete quoted string value.
3756 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a quoted string; optional initial content.
3762 void VmaJsonWriter::BeginString(
const char* pStr)
3764 VMA_ASSERT(!m_InsideString);
3768 m_InsideString =
true;
3769 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3771 ContinueString(pStr);
// Appends raw characters to the open string, escaping per JSON rules
// (per-character switch dropped by extraction; only the unsupported-character
// assert survives).
3775 void VmaJsonWriter::ContinueString(
const char* pStr)
3777 VMA_ASSERT(m_InsideString);
3779 const size_t strLen = strlen(pStr);
3780 for(
size_t i = 0; i < strLen; ++i)
3807 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric continuations: only legal while a string is open.
3813 void VmaJsonWriter::ContinueString(uint32_t n)
3815 VMA_ASSERT(m_InsideString);
3819 void VmaJsonWriter::ContinueString(uint64_t n)
3821 VMA_ASSERT(m_InsideString);
// Closes the open string, optionally appending final content first.
3825 void VmaJsonWriter::EndString(
const char* pStr)
3827 VMA_ASSERT(m_InsideString);
3828 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3830 ContinueString(pStr);
3833 m_InsideString =
false;
3836 void VmaJsonWriter::WriteNumber(uint32_t n)
3838 VMA_ASSERT(!m_InsideString);
3843 void VmaJsonWriter::WriteNumber(uint64_t n)
3845 VMA_ASSERT(!m_InsideString);
3850 void VmaJsonWriter::WriteBool(
bool b)
3852 VMA_ASSERT(!m_InsideString);
3854 m_SB.Add(b ?
"true" :
"false");
3857 void VmaJsonWriter::WriteNull()
3859 VMA_ASSERT(!m_InsideString);
// Separator/indentation logic shared by all value writers.
// NOTE(review): lossy extraction — the lines emitting ':', ',' and the indent
// characters were dropped; code left untouched.
3864 void VmaJsonWriter::BeginValue(
bool isString)
3866 if(!m_Stack.empty())
3868 StackItem& currItem = m_Stack.back();
// Inside an object, even-numbered values are keys and must be strings.
3869 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3870 currItem.valueCount % 2 == 0)
3872 VMA_ASSERT(isString);
// Odd-numbered position => this is a value following a key (emit ':').
3875 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3876 currItem.valueCount % 2 != 0)
// Otherwise, any value after the first needs a ',' separator.
3880 else if(currItem.valueCount > 0)
3889 ++currItem.valueCount;
// Emits a newline plus one INDENT per open non-single-line collection;
// oneLess is used when closing a collection so '}'/']' aligns with its opener.
3893 void VmaJsonWriter::WriteIndent(
bool oneLess)
3895 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3899 size_t count = m_Stack.size();
3900 if(count > 0 && oneLess)
3904 for(
size_t i = 0; i < count; ++i)
// Out-of-line VmaAllocation_T accessors dispatching on m_Type.
// NOTE(review): lossy extraction — the switch(m_Type) headers, braces and
// default branches were dropped; code left untouched.
3911 #endif // #if VMA_STATS_STRING_ENABLED 3915 VkDeviceSize VmaAllocation_T::GetOffset()
const 3919 case ALLOCATION_TYPE_BLOCK:
3920 return m_BlockAllocation.m_Offset;
3921 case ALLOCATION_TYPE_OWN:
// Dedicated allocations own a whole VkDeviceMemory, hence no offset case body
// survives here (presumably returns 0 — TODO confirm).
3929 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3933 case ALLOCATION_TYPE_BLOCK:
3934 return m_BlockAllocation.m_Block->m_hMemory;
3935 case ALLOCATION_TYPE_OWN:
3936 return m_OwnAllocation.m_hMemory;
3939 return VK_NULL_HANDLE;
3943 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3947 case ALLOCATION_TYPE_BLOCK:
3948 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3949 case ALLOCATION_TYPE_OWN:
3950 return m_OwnAllocation.m_MemoryTypeIndex;
3957 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3961 case ALLOCATION_TYPE_BLOCK:
3962 return m_BlockAllocation.m_Block->m_BlockVectorType;
3963 case ALLOCATION_TYPE_OWN:
3964 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3967 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// Mapped pointer: block base pointer + suballocation offset, or the own
// allocation's mapped pointer; null when unmapped.
3971 void* VmaAllocation_T::GetMappedData()
const 3975 case ALLOCATION_TYPE_BLOCK:
3976 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3978 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
3985 case ALLOCATION_TYPE_OWN:
3986 return m_OwnAllocation.m_pMappedData;
3993 bool VmaAllocation_T::CanBecomeLost()
const 3997 case ALLOCATION_TYPE_BLOCK:
3998 return m_BlockAllocation.m_CanBecomeLost;
3999 case ALLOCATION_TYPE_OWN:
// Pool handle only exists for block-backed allocations.
4007 VmaPool VmaAllocation_T::GetPool()
const 4009 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4010 return m_BlockAllocation.m_hPool;
// Re-maps a persistently mapped dedicated allocation (used when the global
// unmap counter returns to zero). Non-persistent allocations are a no-op.
4013 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4015 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4016 if(m_OwnAllocation.m_PersistentMap)
4018 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4019 hAllocator->m_hDevice,
4020 m_OwnAllocation.m_hMemory,
4024 &m_OwnAllocation.m_pMappedData);
4028 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4030 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4031 if(m_OwnAllocation.m_pMappedData)
4033 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4034 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4035 m_OwnAllocation.m_pMappedData = VMA_NULL;
// Lost-allocation CAS loop: refuses if already lost or still in use within
// frameInUseCount frames; otherwise tries to swing the atomic frame index to
// VMA_FRAME_INDEX_LOST (weak CAS, so retried — loop braces dropped).
4040 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4042 VMA_ASSERT(CanBecomeLost());
4048 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4051 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4056 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4062 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Emits one VmaStatInfo as a JSON object (the WriteNumber calls carrying the
// actual stat fields were dropped by extraction; keys survive).
4072 #if VMA_STATS_STRING_ENABLED 4075 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4084 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4088 json.WriteString(
"Blocks");
4091 json.WriteString(
"Allocations");
4094 json.WriteString(
"UnusedRanges");
4097 json.WriteString(
"UsedBytes");
4100 json.WriteString(
"UnusedBytes");
// Min/Avg/Max sub-objects for allocation and unused-range sizes.
4105 json.WriteString(
"AllocationSize");
4106 json.BeginObject(
true);
4107 json.WriteString(
"Min");
4109 json.WriteString(
"Avg");
4111 json.WriteString(
"Max");
4118 json.WriteString(
"UnusedRangeSize");
4119 json.BeginObject(
true);
4120 json.WriteString(
"Min");
4122 json.WriteString(
"Avg");
4124 json.WriteString(
"Max");
// Comparator used to binary-search m_FreeSuballocationsBySize: orders free
// suballocation iterators by size, with a heterogeneous size overload.
4132 #endif // #if VMA_STATS_STRING_ENABLED 4134 struct VmaSuballocationItemSizeLess
4137 const VmaSuballocationList::iterator lhs,
4138 const VmaSuballocationList::iterator rhs)
const 4140 return lhs->size < rhs->size;
4143 const VmaSuballocationList::iterator lhs,
4144 VkDeviceSize rhsSize)
const 4146 return lhs->size < rhsSize;
// Block construction and lifecycle. NOTE(review): lossy extraction — braces,
// a couple of initializers (m_Size, m_FreeCount) and early-return bodies were
// dropped; code left untouched.
4150 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
4151 m_MemoryTypeIndex(UINT32_MAX),
4152 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
4153 m_hMemory(VK_NULL_HANDLE),
4155 m_PersistentMap(false),
4156 m_pMappedData(VMA_NULL),
4159 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4160 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Takes ownership of newMemory and initializes bookkeeping to a single free
// suballocation spanning the whole block.
4164 void VmaDeviceMemoryBlock::Init(
4165 uint32_t newMemoryTypeIndex,
4166 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
4167 VkDeviceMemory newMemory,
4168 VkDeviceSize newSize,
4172 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4174 m_MemoryTypeIndex = newMemoryTypeIndex;
4175 m_BlockVectorType = newBlockVectorType;
4176 m_hMemory = newMemory;
4178 m_PersistentMap = persistentMap;
4179 m_pMappedData = pMappedData;
4181 m_SumFreeSize = newSize;
4183 m_Suballocations.clear();
4184 m_FreeSuballocationsBySize.clear();
4186 VmaSuballocation suballoc = {};
4187 suballoc.offset = 0;
4188 suballoc.size = newSize;
4189 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4190 suballoc.hAllocation = VK_NULL_HANDLE;
4192 m_Suballocations.push_back(suballoc);
// (the decrement of the end iterator to point at the pushed element was
// dropped by extraction — TODO confirm upstream does --suballocItem here)
4193 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4195 m_FreeSuballocationsBySize.push_back(suballocItem);
// Unmaps if still mapped, then returns the VkDeviceMemory to the allocator.
4198 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
4202 VMA_ASSERT(IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
4204 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
4205 if(m_pMappedData != VMA_NULL)
4207 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
4208 m_pMappedData = VMA_NULL;
4211 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
4212 m_hMemory = VK_NULL_HANDLE;
// Full consistency walk: offsets contiguous, free ranges not adjacent,
// hAllocation null iff free, free counters/sums match, and the by-size index
// is sorted and covers exactly the registerable free ranges.
4215 bool VmaDeviceMemoryBlock::Validate()
const 4217 if((m_hMemory == VK_NULL_HANDLE) ||
4219 m_Suballocations.empty())
4225 VkDeviceSize calculatedOffset = 0;
4227 uint32_t calculatedFreeCount = 0;
4229 VkDeviceSize calculatedSumFreeSize = 0;
4232 size_t freeSuballocationsToRegister = 0;
4234 bool prevFree =
false;
4236 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4237 suballocItem != m_Suballocations.cend();
4240 const VmaSuballocation& subAlloc = *suballocItem;
4243 if(subAlloc.offset != calculatedOffset)
// Two adjacent free ranges should have been merged.
4248 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4250 if(prevFree && currFree)
4254 prevFree = currFree;
4256 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4263 calculatedSumFreeSize += subAlloc.size;
4264 ++calculatedFreeCount;
// Only free ranges at or above the registration threshold go in the index.
4265 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4267 ++freeSuballocationsToRegister;
4271 calculatedOffset += subAlloc.size;
4276 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4281 VkDeviceSize lastSize = 0;
4282 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4284 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4287 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4292 if(suballocItem->size < lastSize)
4297 lastSize = suballocItem->size;
// Final verdict (the `return` keyword line was dropped by extraction).
4302 (calculatedOffset == m_Size) &&
4303 (calculatedSumFreeSize == m_SumFreeSize) &&
4304 (calculatedFreeCount == m_FreeCount);
// Placement search. Two strategies depending on the allocator's "best fit"
// setting: forward scan from the first free range large enough (best fit) or
// reverse scan (worst fit); with canMakeOtherLost, additionally considers
// reclaiming lost-capable allocations and picks the cheapest candidate.
// NOTE(review): lossy extraction — the CheckAllocation call headers, strategy
// #ifdefs, braces and several return statements were dropped; code untouched.
4317 bool VmaDeviceMemoryBlock::CreateAllocationRequest(
4318 uint32_t currentFrameIndex,
4319 uint32_t frameInUseCount,
4320 VkDeviceSize bufferImageGranularity,
4321 VkDeviceSize allocSize,
4322 VkDeviceSize allocAlignment,
4323 VmaSuballocationType allocType,
4324 bool canMakeOtherLost,
4325 VmaAllocationRequest* pAllocationRequest)
4327 VMA_ASSERT(allocSize > 0);
4328 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4329 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4330 VMA_HEAVY_ASSERT(Validate());
// Early out: not enough total free space and cannot evict anything.
4333 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4339 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4340 if(freeSuballocCount > 0)
// Best-fit path: binary search for the first free range >= allocSize,
// then try candidates in increasing size order.
4345 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4346 m_FreeSuballocationsBySize.data(),
4347 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4349 VmaSuballocationItemSizeLess());
4350 size_t index = it - m_FreeSuballocationsBySize.data();
4351 for(; index < freeSuballocCount; ++index)
4356 bufferImageGranularity,
4360 m_FreeSuballocationsBySize[index],
4362 &pAllocationRequest->offset,
4363 &pAllocationRequest->itemsToMakeLostCount,
4364 &pAllocationRequest->sumFreeSize,
4365 &pAllocationRequest->sumItemSize))
4367 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit path: try candidates from largest to smallest.
4375 for(
size_t index = freeSuballocCount; index--; )
4380 bufferImageGranularity,
4384 m_FreeSuballocationsBySize[index],
4386 &pAllocationRequest->offset,
4387 &pAllocationRequest->itemsToMakeLostCount,
4388 &pAllocationRequest->sumFreeSize,
4389 &pAllocationRequest->sumItemSize))
4391 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: scan every suballocation that is free or lost-capable and
// keep the candidate with the lowest CalcCost().
4398 if(canMakeOtherLost)
4402 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4403 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4405 VmaAllocationRequest tmpAllocRequest = {};
4406 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4407 suballocIt != m_Suballocations.end();
4410 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4411 suballocIt->hAllocation->CanBecomeLost())
4416 bufferImageGranularity,
4422 &tmpAllocRequest.offset,
4423 &tmpAllocRequest.itemsToMakeLostCount,
4424 &tmpAllocRequest.sumFreeSize,
4425 &tmpAllocRequest.sumItemSize))
4427 tmpAllocRequest.item = suballocIt;
4429 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4431 *pAllocationRequest = tmpAllocRequest;
// Found at least one eviction candidate (sentinel overwritten).
4437 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the eviction plan from a request: walks forward from the request's
// item, making each lost-capable allocation lost and merging freed ranges,
// until itemsToMakeLostCount reaches zero; fails if any MakeLost fails.
4446 bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
4448 while(pAllocationRequest->itemsToMakeLostCount > 0)
4450 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4452 ++pAllocationRequest->item;
4454 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4455 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4456 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4457 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges with neighbors and returns the merged iterator.
4459 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4460 --pAllocationRequest->itemsToMakeLostCount;
4468 VMA_HEAVY_ASSERT(Validate());
4469 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4470 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes every lost-capable allocation in this block lost; returns how many.
4475 uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4477 uint32_t lostAllocationCount = 0;
4478 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4479 it != m_Suballocations.end();
4482 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4483 it->hAllocation->CanBecomeLost() &&
4484 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4486 it = FreeSuballocation(it);
4487 ++lostAllocationCount;
4490 return lostAllocationCount;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. On success writes the chosen *pOffset and,
// when canMakeOtherLost is true, how many existing allocations would have to
// be made lost (*itemsToMakeLostCount) plus the free/used byte sums used to
// cost the request. Respects VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT and the
// Vulkan bufferImageGranularity rule (no linear/optimal resources on the same
// granularity page).
// NOTE(review): extraction-garbled — embedded original line numbers, many
// interior lines (braces, early returns, final `return true`) missing;
// comments only, claims limited to the visible statements.
4493 bool VmaDeviceMemoryBlock::CheckAllocation(
4494 uint32_t currentFrameIndex,
4495 uint32_t frameInUseCount,
4496 VkDeviceSize bufferImageGranularity,
4497 VkDeviceSize allocSize,
4498 VkDeviceSize allocAlignment,
4499 VmaSuballocationType allocType,
4500 VmaSuballocationList::const_iterator suballocItem,
4501 bool canMakeOtherLost,
4502 VkDeviceSize* pOffset,
4503 size_t* itemsToMakeLostCount,
4504 VkDeviceSize* pSumFreeSize,
4505 VkDeviceSize* pSumItemSize)
const 4507 VMA_ASSERT(allocSize > 0);
4508 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4509 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4510 VMA_ASSERT(pOffset != VMA_NULL);
4512 *itemsToMakeLostCount = 0;
// --- Branch 1: allowed to make other allocations lost. ---
4516 if(canMakeOtherLost)
4518 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4520 *pSumFreeSize = suballocItem->size;
// Used item: only acceptable if it can be lost for the given frame window.
4524 if(suballocItem->hAllocation->CanBecomeLost() &&
4525 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4527 ++*itemsToMakeLostCount;
4528 *pSumItemSize = suballocItem->size;
// Remaining size of the whole block must still fit the allocation.
4537 if(m_Size - suballocItem->offset < allocSize)
4543 *pOffset = suballocItem->offset;
// Apply debug margin (except at the very beginning of the block).
4546 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4548 *pOffset += VMA_DEBUG_MARGIN;
// Align up to the stricter of requested and debug alignment.
4552 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4553 *pOffset = VmaAlignUp(*pOffset, alignment);
// Check previous suballocations for buffer/image granularity conflicts;
// if found, bump the offset up to the next granularity page.
4557 if(bufferImageGranularity > 1)
4559 bool bufferImageGranularityConflict =
false;
4560 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4561 while(prevSuballocItem != m_Suballocations.cbegin())
4564 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4565 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4567 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4569 bufferImageGranularityConflict =
true;
4577 if(bufferImageGranularityConflict)
4579 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// If alignment pushed the offset past this suballocation entirely, fail
// (exact handling of this case lost to extraction — TODO confirm upstream).
4585 if(*pOffset >= suballocItem->offset + suballocItem->size)
4591 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4594 VmaSuballocationList::const_iterator next = suballocItem;
4596 const VkDeviceSize requiredEndMargin =
4597 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4599 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Total span must fit within the block.
4601 if(suballocItem->offset + totalSize > m_Size)
// The request may need to consume several consecutive suballocations:
// accumulate free bytes and lose-able used bytes until totalSize is covered.
4608 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4609 if(totalSize > suballocItem->size)
4611 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4612 while(remainingSize > 0)
4615 if(lastSuballocItem == m_Suballocations.cend())
4619 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4621 *pSumFreeSize += lastSuballocItem->size;
4625 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4626 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4627 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4629 ++*itemsToMakeLostCount;
4630 *pSumItemSize += lastSuballocItem->size;
4637 remainingSize = (lastSuballocItem->size < remainingSize) ?
4638 remainingSize - lastSuballocItem->size : 0;
// Following suballocations on the same granularity page must also be
// lose-able if they conflict with the new allocation's type.
4644 if(bufferImageGranularity > 1)
4646 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4648 while(nextSuballocItem != m_Suballocations.cend())
4650 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4651 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4653 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4655 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4656 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4657 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4659 ++*itemsToMakeLostCount;
// --- Branch 2: cannot make others lost — suballocItem must itself be FREE
// and large enough on its own. ---
4678 const VmaSuballocation& suballoc = *suballocItem;
4679 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4681 *pSumFreeSize = suballoc.size;
4684 if(suballoc.size < allocSize)
4690 *pOffset = suballoc.offset;
4693 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4695 *pOffset += VMA_DEBUG_MARGIN;
4699 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4700 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity scan as in branch 1, against earlier neighbors.
4704 if(bufferImageGranularity > 1)
4706 bool bufferImageGranularityConflict =
false;
4707 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4708 while(prevSuballocItem != m_Suballocations.cbegin())
4711 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4712 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4714 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4716 bufferImageGranularityConflict =
true;
4724 if(bufferImageGranularityConflict)
4726 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4731 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4734 VmaSuballocationList::const_iterator next = suballocItem;
4736 const VkDeviceSize requiredEndMargin =
4737 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padded allocation does not fit in this single free suballocation.
4740 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan against later neighbors (outcome lines lost to
// extraction — presumably a conflict means failure here; confirm upstream).
4747 if(bufferImageGranularity > 1)
4749 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4751 while(nextSuballocItem != m_Suballocations.cend())
4753 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4754 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4756 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// True when the block contains exactly one suballocation and it is free,
// i.e. the whole block is a single unallocated range.
4775 bool VmaDeviceMemoryBlock::IsEmpty()
const 4777 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Commits a previously computed allocation request: converts the target FREE
// suballocation into a used one of `allocSize` at request.offset, and inserts
// new FREE suballocations for any leftover padding before/after it. Updates
// m_FreeCount and m_SumFreeSize accordingly.
// NOTE(review): extraction-garbled — embedded original line numbers; some
// interior lines (e.g. `++next;`, paddingEnd>0 guard, m_FreeCount updates for
// paddingEnd) are missing; comments only.
4780 void VmaDeviceMemoryBlock::Alloc(
4781 const VmaAllocationRequest& request,
4782 VmaSuballocationType type,
4783 VkDeviceSize allocSize,
4784 VmaAllocation hAllocation)
4786 VMA_ASSERT(request.item != m_Suballocations.end());
4787 VmaSuballocation& suballoc = *request.item;
// The target must be FREE and large enough to hold offset padding + size.
4789 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4791 VMA_ASSERT(request.offset >= suballoc.offset);
4792 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4793 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4794 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the by-size free list before mutating the entry.
4798 UnregisterFreeSuballocation(request.item);
4800 suballoc.offset = request.offset;
4801 suballoc.size = allocSize;
4802 suballoc.type = type;
4803 suballoc.hAllocation = hAllocation;
// Insert a FREE suballocation after the allocation for trailing padding.
4808 VmaSuballocation paddingSuballoc = {};
4809 paddingSuballoc.offset = request.offset + allocSize;
4810 paddingSuballoc.size = paddingEnd;
4811 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4812 VmaSuballocationList::iterator next = request.item;
4814 const VmaSuballocationList::iterator paddingEndItem =
4815 m_Suballocations.insert(next, paddingSuballoc);
4816 RegisterFreeSuballocation(paddingEndItem);
// Insert a FREE suballocation before the allocation for leading padding.
4822 VmaSuballocation paddingSuballoc = {};
4823 paddingSuballoc.offset = request.offset - paddingBegin;
4824 paddingSuballoc.size = paddingBegin;
4825 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4826 const VmaSuballocationList::iterator paddingBeginItem =
4827 m_Suballocations.insert(request.item, paddingSuballoc);
4828 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; padding ranges re-add to the count elsewhere.
4832 m_FreeCount = m_FreeCount - 1;
4833 if(paddingBegin > 0)
4841 m_SumFreeSize -= allocSize;
// Marks the given suballocation FREE, merges it with adjacent FREE neighbors
// (next first, then previous), keeps the by-size registry consistent, and
// returns an iterator to the resulting FREE suballocation.
// NOTE(review): extraction-garbled — embedded original line numbers; iterator
// advance/retreat lines (`++nextItem;`, `--prevItem;`) and some branch braces
// are missing; comments only.
4844 VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
4847 VmaSuballocation& suballoc = *suballocItem;
4848 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4849 suballoc.hAllocation = VK_NULL_HANDLE;
4853 m_SumFreeSize += suballoc.size;
// Decide which neighbors (if any) to coalesce with.
4856 bool mergeWithNext =
false;
4857 bool mergeWithPrev =
false;
4859 VmaSuballocationList::iterator nextItem = suballocItem;
4861 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
4863 mergeWithNext =
true;
4866 VmaSuballocationList::iterator prevItem = suballocItem;
4867 if(suballocItem != m_Suballocations.begin())
4870 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4872 mergeWithPrev =
true;
// Merge forward: absorb the next FREE item into this one.
4878 UnregisterFreeSuballocation(nextItem);
4879 MergeFreeWithNext(suballocItem);
// Merge backward: absorb this item into the previous FREE item and
// re-register the grown range.
4884 UnregisterFreeSuballocation(prevItem);
4885 MergeFreeWithNext(prevItem);
4886 RegisterFreeSuballocation(prevItem);
// No backward merge: register this item itself as the free range.
4891 RegisterFreeSuballocation(suballocItem);
4892 return suballocItem;
// Frees the suballocation owning `allocation` by linear search over the
// suballocation list. Asserts if the allocation is not found in this block.
// NOTE(review): extraction-garbled — loop increment and return after the
// successful free are missing from the visible text; comments only.
4896 void VmaDeviceMemoryBlock::Free(
const VmaAllocation allocation)
4898 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4899 suballocItem != m_Suballocations.end();
4902 VmaSuballocation& suballoc = *suballocItem;
4903 if(suballoc.hAllocation == allocation)
4905 FreeSuballocation(suballocItem);
4906 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation did not belong to this block — a bug.
4910 VMA_ASSERT(0 &&
"Not found!");
// Writes this block's statistics and its full suballocation list as JSON
// (TotalBytes / UnusedBytes / Allocations / UnusedRanges plus one object per
// suballocation with Type/Size/Offset). Compiled only when
// VMA_STATS_STRING_ENABLED is on.
// NOTE(review): extraction-garbled — embedded original line numbers; the
// surrounding Begin/EndObject/Array calls are missing from the visible text.
4913 #if VMA_STATS_STRING_ENABLED 4915 void VmaDeviceMemoryBlock::PrintDetailedMap(
class VmaJsonWriter& json)
const 4919 json.WriteString(
"TotalBytes");
4920 json.WriteNumber(m_Size);
4922 json.WriteString(
"UnusedBytes");
4923 json.WriteNumber(m_SumFreeSize);
4925 json.WriteString(
"Allocations");
// Used count = total suballocations minus free ones.
4926 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4928 json.WriteString(
"UnusedRanges");
4929 json.WriteNumber(m_FreeCount);
4931 json.WriteString(
"Suballocations");
// One compact JSON object per suballocation, in address order.
4934 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4935 suballocItem != m_Suballocations.cend();
4936 ++suballocItem, ++i)
4938 json.BeginObject(
true);
4940 json.WriteString(
"Type");
4941 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4943 json.WriteString(
"Size");
4944 json.WriteNumber(suballocItem->size);
4946 json.WriteString(
"Offset");
4947 json.WriteNumber(suballocItem->offset);
// Coalesces the FREE suballocation at `item` with the FREE suballocation
// immediately after it: grows item->size and erases the next entry.
// NOTE(review): extraction-garbled — the `++nextItem;` advance between lines
// 4963 and 4965 is missing from the visible text (implied by the asserts).
4956 #endif // #if VMA_STATS_STRING_ENABLED 4958 void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
4960 VMA_ASSERT(item != m_Suballocations.end());
4961 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4963 VmaSuballocationList::iterator nextItem = item;
4965 VMA_ASSERT(nextItem != m_Suballocations.end());
4966 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
4968 item->size += nextItem->size;
4970 m_Suballocations.erase(nextItem);
// Adds a FREE suballocation to m_FreeSuballocationsBySize (kept sorted by
// size) if it is large enough to be worth tracking; tiny free ranges below
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are not indexed.
4973 void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
4975 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4976 VMA_ASSERT(item->size > 0);
4980 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4982 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4984 if(m_FreeSuballocationsBySize.empty())
4986 m_FreeSuballocationsBySize.push_back(item);
// Non-empty vector: insert at the position that keeps it sorted by size.
4990 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Uses binary
// search to find the first entry of equal size, then scans forward through
// the run of equal-sized entries to find the exact iterator. Asserts if the
// item is absent.
// NOTE(review): extraction-garbled — embedded original line numbers, loop
// increment and some braces missing; comments only.
4998 void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5000 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5001 VMA_ASSERT(item->size > 0);
5005 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only registered sizes need removal; tiny ranges were never indexed.
5007 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5009 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5010 m_FreeSuballocationsBySize.data(),
5011 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5013 VmaSuballocationItemSizeLess());
5014 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5015 index < m_FreeSuballocationsBySize.size();
5018 if(m_FreeSuballocationsBySize[index] == item)
5020 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item is missing.
5023 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5025 VMA_ASSERT(0 &&
"Not found.");
// Debug validation of m_FreeSuballocationsBySize: every entry must be FREE,
// at least the minimum registered size, and the vector must be sorted by size
// ascending. Returns false on the first violation (return statements lost to
// extraction; implied by the checks).
5031 bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList()
const 5033 VkDeviceSize lastSize = 0;
5034 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5036 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5038 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5043 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5048 if(it->size < lastSize)
5054 lastSize = it->size;
// NOTE(review): orphaned fragment — the enclosing function's signature
// (original lines ~5057-5060) was lost in extraction. Zero-initializes a
// VmaStatInfo `outInfo`; presumably the body of a stat-reset helper.
// Confirm against the upstream source.
5061 memset(&outInfo, 0,
sizeof(outInfo));
// Fills a VmaStatInfo from one device memory block: counts suballocation
// ranges and iterates them, distinguishing used vs FREE entries.
// NOTE(review): extraction-garbled — the accumulation statements for used/
// unused sizes (original lines 5071-5081, 5088-5112) are missing.
5066 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaDeviceMemoryBlock& block)
5070 const uint32_t rangeCount = (uint32_t)block.m_Suballocations.size();
5082 for(VmaSuballocationList::const_iterator suballocItem = block.m_Suballocations.cbegin();
5083 suballocItem != block.m_Suballocations.cend();
5086 const VmaSuballocation& suballoc = *suballocItem;
5087 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Finalizes a VmaStatInfo after accumulation (body lost to extraction —
// original lines 5115-5120 missing; presumably computes averages).
5114 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the VmaPoolCreateInfo fields to the
// embedded VmaBlockVector member, choosing the MAPPED vector type when the
// pool is persistently mapped. Destructor body lost to extraction.
// NOTE(review): heavily garbled — member-initializer punctuation and several
// arguments are missing from the visible text; comments only.
5122 VmaPool_T::VmaPool_T(
5123 VmaAllocator hAllocator,
5127 createInfo.memoryTypeIndex,
5129 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5130 createInfo.blockSize,
5131 createInfo.minBlockCount,
5132 createInfo.maxBlockCount,
5134 createInfo.frameInUseCount,
5139 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores all configuration (memory type, block
// vector type, preferred/min/max block sizes, granularity, frame-in-use
// count, custom-pool flag) and initializes the empty block list with the
// allocator's allocation callbacks. No blocks are created here.
5143 #if VMA_STATS_STRING_ENABLED 5145 #endif // #if VMA_STATS_STRING_ENABLED 5147 VmaBlockVector::VmaBlockVector(
5148 VmaAllocator hAllocator,
5149 uint32_t memoryTypeIndex,
5150 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5151 VkDeviceSize preferredBlockSize,
5152 size_t minBlockCount,
5153 size_t maxBlockCount,
5154 VkDeviceSize bufferImageGranularity,
5155 uint32_t frameInUseCount,
5156 bool isCustomPool) :
5157 m_hAllocator(hAllocator),
5158 m_MemoryTypeIndex(memoryTypeIndex),
5159 m_BlockVectorType(blockVectorType),
5160 m_PreferredBlockSize(preferredBlockSize),
5161 m_MinBlockCount(minBlockCount),
5162 m_MaxBlockCount(maxBlockCount),
5163 m_BufferImageGranularity(bufferImageGranularity),
5164 m_FrameInUseCount(frameInUseCount),
5165 m_IsCustomPool(isCustomPool),
5166 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5167 m_HasEmptyBlock(false),
5168 m_pDefragmentator(VMA_NULL)
// Destructor: requires the defragmentator to have been destroyed first, then
// destroys and deletes every owned memory block (reverse order).
5172 VmaBlockVector::~VmaBlockVector()
5174 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5176 for(
size_t i = m_Blocks.size(); i--; )
5178 m_Blocks[i]->Destroy(m_hAllocator);
5179 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure (the failure/success returns are lost to extraction).
5183 VkResult VmaBlockVector::CreateMinBlocks()
5185 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5187 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5188 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks under the vector mutex
// (per-block accumulation lines beyond `size` are lost to extraction).
5196 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
// Guard the block list against concurrent allocation/free.
5203 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5205 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5207 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5209 VMA_HEAVY_ASSERT(pBlock->Validate());
5211 const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
5213 pStats->
size += pBlock->m_Size;
// Upper bound on retry rounds in VmaBlockVector::Allocate when making other
// allocations lost; prevents livelock against concurrent allocators.
5220 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector, trying in order:
//  1. an existing block that can satisfy the request without losing others;
//  2. a brand-new block (shrinking the size on failure, non-custom pools);
//  3. up to VMA_ALLOCATION_TRY_COUNT rounds of choosing the cheapest block
//     whose existing allocations can be made lost.
// Returns VK_ERROR_TOO_MANY_OBJECTS after exhausting retries, or
// VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits.
// NOTE(review): extraction-garbled — embedded original line numbers; many
// interior lines (argument lists, braces, success returns) are missing;
// comments only, limited to visible statements.
5222 VkResult VmaBlockVector::Allocate(
5223 VmaPool hCurrentPool,
5224 uint32_t currentFrameIndex,
5225 const VkMemoryRequirements& vkMemReq,
5227 VmaSuballocationType suballocType,
5228 VmaAllocation* pAllocation)
// Persistent-map request must match the vector's mapped/unmapped type.
5232 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5234 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5235 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5238 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- Pass 1: search existing blocks, no lost allocations allowed. ---
5242 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5244 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5245 VMA_ASSERT(pCurrBlock);
5246 VmaAllocationRequest currRequest = {};
5247 if(pCurrBlock->CreateAllocationRequest(
5250 m_BufferImageGranularity,
5258 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Block is about to gain an allocation, so it is no longer empty.
5261 if(pCurrBlock->IsEmpty())
5263 m_HasEmptyBlock =
false;
5266 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5267 pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5268 (*pAllocation)->InitBlockAllocation(
5277 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5278 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// --- Pass 2: create a new block if the block count limit allows. ---
5283 const bool canCreateNewBlock =
5285 (m_Blocks.size() < m_MaxBlockCount);
5288 if(canCreateNewBlock)
5291 VkDeviceSize blockSize = m_PreferredBlockSize;
5292 size_t newBlockIndex = 0;
5293 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Non-custom pools retry with progressively smaller block sizes
// (the halving statements are lost to extraction — confirm upstream).
5296 if(res < 0 && m_IsCustomPool ==
false)
5300 if(blockSize >= vkMemReq.size)
5302 res = CreateBlock(blockSize, &newBlockIndex);
5307 if(blockSize >= vkMemReq.size)
5309 res = CreateBlock(blockSize, &newBlockIndex);
5314 if(res == VK_SUCCESS)
5316 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5317 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
// Fresh block: allocate at offset 0 from its single free suballocation.
5320 VmaAllocationRequest allocRequest = {};
5321 allocRequest.item = pBlock->m_Suballocations.begin();
5322 allocRequest.offset = 0;
5323 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5324 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5325 (*pAllocation)->InitBlockAllocation(
5328 allocRequest.offset,
5334 VMA_HEAVY_ASSERT(pBlock->Validate());
5335 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// --- Pass 3: make other (lose-able) allocations lost, bounded retries. ---
5344 if(canMakeOtherLost)
5346 uint32_t tryIndex = 0;
5347 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5349 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5350 VmaAllocationRequest bestRequest = {};
5351 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the block whose request would lose the fewest bytes.
5355 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5357 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5358 VMA_ASSERT(pCurrBlock);
5359 VmaAllocationRequest currRequest = {};
5360 if(pCurrBlock->CreateAllocationRequest(
5363 m_BufferImageGranularity,
5370 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5371 if(pBestRequestBlock == VMA_NULL ||
5372 currRequestCost < bestRequestCost)
5374 pBestRequestBlock = pCurrBlock;
5375 bestRequest = currRequest;
5376 bestRequestCost = currRequestCost;
// Cost 0 cannot be beaten — stop searching early.
5378 if(bestRequestCost == 0)
5386 if(pBestRequestBlock != VMA_NULL)
5388 if(pBestRequestBlock->MakeRequestedAllocationsLost(
5394 if(pBestRequestBlock->IsEmpty())
5396 m_HasEmptyBlock =
false;
5399 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5400 pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5401 (*pAllocation)->InitBlockAllocation(
5410 VMA_HEAVY_ASSERT(pBlock->Validate());
5411 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// All retries consumed: another thread keeps invalidating our request.
5425 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5427 return VK_ERROR_TOO_MANY_OBJECTS;
5431 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back to its block. If that leaves the block empty and
// another empty block already exists (and we are above min block count), the
// now-empty block is scheduled for deletion; otherwise it is kept as the
// single cached empty block. Actual Vulkan memory destruction happens after
// the mutex is released to avoid calling the driver under the lock.
5434 void VmaBlockVector::Free(
5435 VmaAllocation hAllocation)
5437 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the mutex: everything except the final block destruction.
5441 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5443 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5445 pBlock->Free(hAllocation);
5446 VMA_HEAVY_ASSERT(pBlock->Validate());
5448 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5451 if(pBlock->IsEmpty())
// Already have one empty block cached -> delete this one (outside lock).
5454 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5456 pBlockToDelete = pBlock;
// Otherwise keep this block as the cached empty block.
5462 m_HasEmptyBlock =
true;
// Keep blocks sorted by free size for faster future allocation.
5466 IncrementallySortBlocks();
// Destruction of VkDeviceMemory deliberately happens outside the mutex.
5471 if(pBlockToDelete != VMA_NULL)
5473 VMA_DEBUG_LOG(
" Deleted empty allocation");
5474 pBlockToDelete->Destroy(m_hAllocator);
5475 vma_delete(m_hAllocator, pBlockToDelete);
// Removes (but does not destroy) the given block from m_Blocks by linear
// search; the loop exit after removal is lost to extraction.
5479 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5481 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5483 if(m_Blocks[blockIndex] == pBlock)
5485 VmaVectorRemove(m_Blocks, blockIndex);
// Performs one bubble-sort pass over m_Blocks ordering by ascending free
// size, so blocks with less free space are tried first. A single pass per
// call amortizes sorting cost across many Free() calls.
5492 void VmaBlockVector::IncrementallySortBlocks()
5495 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5497 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
5499 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// optionally maps it persistently (unless persistent mappings are globally
// suspended), wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks,
// and reports the new index via pNewBlockIndex.
// NOTE(review): extraction-garbled — early-return on AllocateVulkanMemory
// failure and the pBlock->Init argument list are partly missing.
5505 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5507 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5508 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5509 allocInfo.allocationSize = blockSize;
5510 VkDeviceMemory mem = VK_NULL_HANDLE;
5511 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Persistently-mapped vectors map the memory immediately, unless the
// global unmap counter says persistent mappings are currently suspended.
5520 void* pMappedData = VMA_NULL;
5521 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5522 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5524 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5525 m_hAllocator->m_hDevice,
// Mapping failed: release the freshly allocated memory and bail out.
5533 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5534 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5540 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5543 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5545 allocInfo.allocationSize,
5549 m_Blocks.push_back(pBlock);
5550 if(pNewBlockIndex != VMA_NULL)
5552 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes the whole block vector as JSON under the mutex: configuration
// (memory type, mapped flag, block size, min/max/current block counts,
// frame-in-use count for custom pools; preferred block size otherwise)
// followed by every block's detailed map. Only compiled with
// VMA_STATS_STRING_ENABLED.
5558 #if VMA_STATS_STRING_ENABLED 5560 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5562 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: emit full pool configuration.
5568 json.WriteString(
"MemoryTypeIndex");
5569 json.WriteNumber(m_MemoryTypeIndex);
5571 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5573 json.WriteString(
"Mapped");
5574 json.WriteBool(
true);
5577 json.WriteString(
"BlockSize");
5578 json.WriteNumber(m_PreferredBlockSize);
5580 json.WriteString(
"BlockCount");
5581 json.BeginObject(
true);
5582 if(m_MinBlockCount > 0)
5584 json.WriteString(
"Min");
5585 json.WriteNumber(m_MinBlockCount);
5587 if(m_MaxBlockCount < SIZE_MAX)
5589 json.WriteString(
"Max");
5590 json.WriteNumber(m_MaxBlockCount);
5592 json.WriteString(
"Cur");
5593 json.WriteNumber(m_Blocks.size());
5596 if(m_FrameInUseCount > 0)
5598 json.WriteString(
"FrameInUseCount");
5599 json.WriteNumber(m_FrameInUseCount);
// Non-custom (default) vector branch: only the preferred block size.
5604 json.WriteString(
"PreferredBlockSize");
5605 json.WriteNumber(m_PreferredBlockSize);
5608 json.WriteString(
"Blocks");
5610 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5612 m_Blocks[i]->PrintDetailedMap(json);
// Temporarily unmaps every currently-mapped block (used while persistent
// mappings are suspended, e.g. around defragmentation or lost-device
// handling). Counterpart of MapPersistentlyMappedMemory().
5619 #endif // #if VMA_STATS_STRING_ENABLED 5621 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5623 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5625 for(
size_t i = m_Blocks.size(); i--; )
5627 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5628 if(pBlock->m_pMappedData != VMA_NULL)
// Only persistently-mapped blocks should ever have m_pMappedData set.
5630 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5631 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5632 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every persistently-mapped block after a suspension. Continues past
// individual vkMapMemory failures, remembering the last failing result to
// return (the return statement is lost to extraction).
5637 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5639 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5641 VkResult finalResult = VK_SUCCESS;
5642 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5644 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5645 if(pBlock->m_PersistentMap)
// Must currently be unmapped — mapping twice would be a bug.
5647 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5648 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5649 m_hAllocator->m_hDevice,
5654 &pBlock->m_pMappedData);
5655 if(localResult != VK_SUCCESS)
5657 finalResult = localResult;
// Lazily creates the per-vector defragmentator on first use and returns it
// (constructor arguments partly lost to extraction).
5664 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5665 VmaAllocator hAllocator,
5666 uint32_t currentFrameIndex)
5668 if(m_pDefragmentator == VMA_NULL)
5670 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5676 return m_pDefragmentator;
// Runs the defragmentator under the vector mutex, accumulates moved bytes /
// allocation counts into pDefragmentationStats, then destroys blocks that
// became empty (above m_MinBlockCount), crediting their sizes to the stats.
// Keeps at most one empty block alive, tracked via m_HasEmptyBlock.
// NOTE(review): extraction-garbled — the stats accumulation lines after the
// asserts and the subtraction from maxBytes/AllocationsToMove are missing.
5679 VkResult VmaBlockVector::Defragment(
5681 VkDeviceSize& maxBytesToMove,
5682 uint32_t& maxAllocationsToMove)
5684 if(m_pDefragmentator == VMA_NULL)
5689 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5692 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5695 if(pDefragmentationStats != VMA_NULL)
5697 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5698 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the caller-imposed budgets.
5701 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5702 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks left behind by the moves (reverse iteration so
// VmaVectorRemove does not disturb indices not yet visited).
5708 m_HasEmptyBlock =
false;
5709 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5711 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5712 if(pBlock->IsEmpty())
5714 if(m_Blocks.size() > m_MinBlockCount)
5716 if(pDefragmentationStats != VMA_NULL)
5719 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5722 VmaVectorRemove(m_Blocks, blockIndex);
5723 pBlock->Destroy(m_hAllocator);
5724 vma_delete(m_hAllocator, pBlock);
// Cannot delete (at min count): remember we still hold an empty block.
5728 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator, if any, and resets the pointer.
5736 void VmaBlockVector::DestroyDefragmentator()
5738 if(m_pDefragmentator != VMA_NULL)
5740 vma_delete(m_hAllocator, m_pDefragmentator);
5741 m_pDefragmentator = VMA_NULL;
// Makes lost all eligible allocations in every block of this (pool) vector.
// The accumulation into *pLostAllocationCount is lost to extraction —
// presumably it sums the per-block return values; confirm upstream.
5745 void VmaBlockVector::MakePoolAllocationsLost(
5746 uint32_t currentFrameIndex,
5747 size_t* pLostAllocationCount)
5749 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5751 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5753 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5755 pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Adds this vector's per-block statistics into the global VmaStats under the
// vector mutex: each block's stat info is folded into the total, into its
// memory type's entry, and into its memory heap's entry.
5759 void VmaBlockVector::AddStats(
VmaStats* pStats)
5761 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5762 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5764 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5766 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5768 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5770 VMA_HEAVY_ASSERT(pBlock->Validate());
5772 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
5773 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5774 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5775 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Defragmentator constructor: binds to one block vector and frame index,
// zero-initializes the move counters (m_BytesMoved initializer lost to
// extraction), and sets up the allocation/block info vectors with the
// allocator's callbacks.
5782 VmaDefragmentator::VmaDefragmentator(
5783 VmaAllocator hAllocator,
5784 VmaBlockVector* pBlockVector,
5785 uint32_t currentFrameIndex) :
5786 m_hAllocator(hAllocator),
5787 m_pBlockVector(pBlockVector),
5788 m_CurrentFrameIndex(currentFrameIndex),
5790 m_AllocationsMoved(0),
5791 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5792 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: deletes all per-block info records (reverse order).
5796 VmaDefragmentator::~VmaDefragmentator()
5798 for(
size_t i = m_Blocks.size(); i--; )
5800 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate; *pChanged will be
// set to VK_TRUE later if the allocation is actually moved.
5804 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5806 AllocationInfo allocInfo;
5807 allocInfo.m_hAllocation = hAlloc;
5808 allocInfo.m_pChanged = pChanged;
5809 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, in priority order: an earlier
// defragmentation mapping, the block's persistent mapping, or a fresh
// vkMapMemory whose pointer is cached in m_pMappedDataForDefragmentation so
// Unmap() can release it later.
5812 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Case 1: already mapped by a previous defragmentation round.
5815 if(m_pMappedDataForDefragmentation)
5817 *ppMappedData = m_pMappedDataForDefragmentation;
// Case 2: block is persistently mapped — reuse that mapping.
5822 if(m_pBlock->m_PersistentMap)
5824 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5825 *ppMappedData = m_pBlock->m_pMappedData;
// Case 3: map now, for the duration of defragmentation only.
5830 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5831 hAllocator->m_hDevice,
5832 m_pBlock->m_hMemory,
5836 &m_pMappedDataForDefragmentation);
5837 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmaps only a mapping created by EnsureMapping for defragmentation;
// persistent mappings owned by the block are left untouched.
5841 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5843 if(m_pMappedDataForDefragmentation != VMA_NULL)
5845 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One defragmentation round: repeatedly takes the last allocation of the
// last (least-preferred) block and tries to move it into an earlier block
// where MoveMakesSense(); copies the bytes via mapped pointers, re-binds the
// allocation, and respects the maxBytesToMove / maxAllocationsToMove budgets
// (returning VK_INCOMPLETE when a budget would be exceeded).
// NOTE(review): extraction-garbled — embedded original line numbers; outer
// loop structure, some braces and final returns are missing; comments only.
5849 VkResult VmaDefragmentator::DefragmentRound(
5850 VkDeviceSize maxBytesToMove,
5851 uint32_t maxAllocationsToMove)
5853 if(m_Blocks.empty())
// Cursor starts past the end of the last block's allocation list and is
// normalized downward by the loop below.
5858 size_t srcBlockIndex = m_Blocks.size() - 1;
5859 size_t srcAllocIndex = SIZE_MAX;
// Find the next source allocation, walking to earlier blocks when the
// current one has none left.
5865 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5867 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// No earlier block to fall back to: this round is done.
5870 if(srcBlockIndex == 0)
5877 srcAllocIndex = SIZE_MAX;
5882 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5886 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5887 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5889 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5890 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5891 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5892 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each destination block from the most-preferred up to the source.
5895 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5897 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5898 VmaAllocationRequest dstAllocRequest;
5899 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
5900 m_CurrentFrameIndex,
5901 m_pBlockVector->GetFrameInUseCount(),
5902 m_pBlockVector->GetBufferImageGranularity(),
5907 &dstAllocRequest) &&
5909 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5911 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round before exceeding either limit.
5914 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5915 (m_BytesMoved + size > maxBytesToMove))
5917 return VK_INCOMPLETE;
5920 void* pDstMappedData = VMA_NULL;
5921 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
5922 if(res != VK_SUCCESS)
5927 void* pSrcMappedData = VMA_NULL;
5928 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
5929 if(res != VK_SUCCESS)
// Copy the allocation's bytes, then rebind it to the new block/offset.
5936 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
5937 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
5938 static_cast<size_t>(size));
5940 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
5941 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
5943 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
5945 if(allocInfo.m_pChanged != VMA_NULL)
5947 *allocInfo.m_pChanged = VK_TRUE;
5950 ++m_AllocationsMoved;
5951 m_BytesMoved += size;
5953 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found: advance the cursor to the previous allocation
// or previous block.
5961 if(srcAllocIndex > 0)
5967 if(srcBlockIndex > 0)
5970 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation: builds BlockInfo records for all blocks, sorts
// them by block pointer for binary search, distributes registered (non-lost)
// allocations to their owning block's list, sorts allocations descending by
// size and blocks by move-destination preference, then runs up to two
// DefragmentRound passes and unmaps any temporary mappings.
5980 VkResult VmaDefragmentator::Defragment(
5981 VkDeviceSize maxBytesToMove,
5982 uint32_t maxAllocationsToMove)
5984 if(m_Allocations.empty())
// Build one BlockInfo per block of the bound vector.
5990 const size_t blockCount = m_pBlockVector->m_Blocks.size();
5991 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5993 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
5994 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
5995 m_Blocks.push_back(pBlockInfo);
// Sort by raw block pointer so each allocation's block can be found by
// binary search below.
5999 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move registered allocations into their owning block's list, skipping
// ones already lost.
6002 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6004 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6006 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6008 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6009 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6010 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6012 (*it)->m_Allocations.push_back(allocInfo);
6020 m_Allocations.clear();
6022 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6024 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6025 pBlockInfo->CalcHasNonMovableAllocations();
6026 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so preferred move destinations come first.
6030 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stop early on VK_INCOMPLETE or error.
6033 VkResult result = VK_SUCCESS;
6034 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6036 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any vkMapMemory mappings created only for defragmentation.
6040 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6042 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// A move improves packing when the destination is an earlier block, or the
// same block at a lower offset (return values lost to extraction: earlier
// block -> true, later block -> false, same block compares offsets).
6048 bool VmaDefragmentator::MoveMakesSense(
6049 size_t dstBlockIndex, VkDeviceSize dstOffset,
6050 size_t srcBlockIndex, VkDeviceSize srcOffset)
6052 if(dstBlockIndex < srcBlockIndex)
6056 if(dstBlockIndex > srcBlockIndex)
6060 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (the signature and the first initializers,
// original lines ~6062-6071, were lost in extraction). Copies creation
// parameters, zeroes all per-memory-type state, imports Vulkan function
// pointers, queries device/memory properties, applies optional per-heap size
// limits, then creates one VmaBlockVector and one own-allocation vector per
// (memory type, block vector type) pair.
6072 m_PhysicalDevice(pCreateInfo->physicalDevice),
6073 m_hDevice(pCreateInfo->device),
6074 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6075 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6076 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6077 m_UnmapPersistentlyMappedMemoryCounter(0),
6078 m_PreferredLargeHeapBlockSize(0),
6079 m_PreferredSmallHeapBlockSize(0),
6080 m_CurrentFrameIndex(0),
6081 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all aggregate members before filling them in.
6085 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6086 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6087 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6089 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6090 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
// Heap limits default to "no limit".
6092 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6094 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6105 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6106 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided per-heap limits, also clamping the reported heap
// sizes so block-size heuristics respect the limit.
6115 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6117 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6118 if(limit != VK_WHOLE_SIZE)
6120 m_HeapSizeLimit[heapIndex] = limit;
6121 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6123 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One block vector + own-allocation list per (type, mapped/unmapped) pair.
6129 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6131 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6133 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6135 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6138 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6142 GetBufferImageGranularity(),
6147 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all user pools must already be destroyed; deletes every
// own-allocation vector and block vector for each (type, vector type) pair.
6152 VmaAllocator_T::~VmaAllocator_T()
6154 VMA_ASSERT(m_Pools.empty());
6156 for(
size_t i = GetMemoryTypeCount(); i--; )
6158 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6160 vma_delete(
this, m_pOwnAllocations[i][j]);
6161 vma_delete(
this, m_pBlockVectors[i][j]);
6166 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6168 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6169 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6170 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6171 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6172 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6173 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6174 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6175 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6176 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6177 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6178 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6179 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6180 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6181 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6182 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
6183 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6185 if(pVulkanFunctions != VMA_NULL)
6187 m_VulkanFunctions = *pVulkanFunctions;
6192 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6193 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6194 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6195 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6196 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6197 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6198 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6199 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6200 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6201 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6202 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6203 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6204 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6205 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6208 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6210 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6211 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6212 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6213 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
6216 VkResult VmaAllocator_T::AllocateMemoryOfType(
6217 const VkMemoryRequirements& vkMemReq,
6219 uint32_t memTypeIndex,
6220 VmaSuballocationType suballocType,
6221 VmaAllocation* pAllocation)
6223 VMA_ASSERT(pAllocation != VMA_NULL);
6224 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
6226 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6227 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6228 VMA_ASSERT(blockVector);
6230 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6232 const bool ownMemory =
6234 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6236 vkMemReq.size > preferredBlockSize / 2);
6242 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6246 return AllocateOwnMemory(
6257 VkResult res = blockVector->Allocate(
6259 m_CurrentFrameIndex.load(),
6264 if(res == VK_SUCCESS)
6270 res = AllocateOwnMemory(
6275 createInfo.pUserData,
6277 if(res == VK_SUCCESS)
6280 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6286 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6292 VkResult VmaAllocator_T::AllocateOwnMemory(
6294 VmaSuballocationType suballocType,
6295 uint32_t memTypeIndex,
6298 VmaAllocation* pAllocation)
6300 VMA_ASSERT(pAllocation);
6302 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6303 allocInfo.memoryTypeIndex = memTypeIndex;
6304 allocInfo.allocationSize = size;
6307 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6308 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6311 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6315 void* pMappedData =
nullptr;
6318 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6320 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
6323 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6324 FreeVulkanMemory(memTypeIndex, size, hMemory);
6330 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6331 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6335 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6336 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6337 VMA_ASSERT(pOwnAllocations);
6338 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6341 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6346 VkResult VmaAllocator_T::AllocateMemory(
6347 const VkMemoryRequirements& vkMemReq,
6349 VmaSuballocationType suballocType,
6350 VmaAllocation* pAllocation)
6355 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6356 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6358 if((createInfo.
pool != VK_NULL_HANDLE) &&
6361 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6362 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6365 if(createInfo.
pool != VK_NULL_HANDLE)
6367 return createInfo.
pool->m_BlockVector.Allocate(
6369 m_CurrentFrameIndex.load(),
6378 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6379 uint32_t memTypeIndex = UINT32_MAX;
6381 if(res == VK_SUCCESS)
6383 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6385 if(res == VK_SUCCESS)
6395 memoryTypeBits &= ~(1u << memTypeIndex);
6398 if(res == VK_SUCCESS)
6400 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6402 if(res == VK_SUCCESS)
6412 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6423 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6425 VMA_ASSERT(allocation);
6427 if(allocation->CanBecomeLost() ==
false ||
6428 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6430 switch(allocation->GetType())
6432 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6434 VmaBlockVector* pBlockVector = VMA_NULL;
6435 VmaPool hPool = allocation->GetPool();
6436 if(hPool != VK_NULL_HANDLE)
6438 pBlockVector = &hPool->m_BlockVector;
6442 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6443 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6444 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6446 pBlockVector->Free(allocation);
6449 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6450 FreeOwnMemory(allocation);
6457 vma_delete(
this, allocation);
6460 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6463 InitStatInfo(pStats->
total);
6464 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6466 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6470 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6472 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6473 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6475 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6476 VMA_ASSERT(pBlockVector);
6477 pBlockVector->AddStats(pStats);
6483 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6484 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6486 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
6491 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6493 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6494 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6495 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6497 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6498 VMA_ASSERT(pOwnAllocVector);
6499 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6502 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6503 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6504 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6505 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6511 VmaPostprocessCalcStatInfo(pStats->
total);
6512 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6513 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6514 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6515 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6518 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6520 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6522 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6524 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6526 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6528 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6529 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6530 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6534 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6535 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6536 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6538 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6539 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
6545 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6546 pBlockVector->UnmapPersistentlyMappedMemory();
6553 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6554 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6556 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6563 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6565 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6566 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6568 VkResult finalResult = VK_SUCCESS;
6569 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6573 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6574 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6576 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6580 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6582 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6583 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6584 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6588 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6589 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6590 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6592 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6593 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
6599 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6600 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6601 if(localResult != VK_SUCCESS)
6603 finalResult = localResult;
6615 VkResult VmaAllocator_T::Defragment(
6616 VmaAllocation* pAllocations,
6617 size_t allocationCount,
6618 VkBool32* pAllocationsChanged,
6622 if(pAllocationsChanged != VMA_NULL)
6624 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6626 if(pDefragmentationStats != VMA_NULL)
6628 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6631 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6633 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6634 return VK_ERROR_MEMORY_MAP_FAILED;
6637 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6639 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6641 const size_t poolCount = m_Pools.size();
6644 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6646 VmaAllocation hAlloc = pAllocations[allocIndex];
6648 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6650 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6652 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6654 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6656 VmaBlockVector* pAllocBlockVector =
nullptr;
6658 const VmaPool hAllocPool = hAlloc->GetPool();
6660 if(hAllocPool != VK_NULL_HANDLE)
6662 pAllocBlockVector = &hAllocPool->GetBlockVector();
6667 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6670 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6672 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6673 &pAllocationsChanged[allocIndex] : VMA_NULL;
6674 pDefragmentator->AddAllocation(hAlloc, pChanged);
6678 VkResult result = VK_SUCCESS;
6682 VkDeviceSize maxBytesToMove = SIZE_MAX;
6683 uint32_t maxAllocationsToMove = UINT32_MAX;
6684 if(pDefragmentationInfo != VMA_NULL)
6691 for(uint32_t memTypeIndex = 0;
6692 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6696 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6698 for(uint32_t blockVectorType = 0;
6699 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6702 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6703 pDefragmentationStats,
6705 maxAllocationsToMove);
6711 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6713 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6714 pDefragmentationStats,
6716 maxAllocationsToMove);
6722 for(
size_t poolIndex = poolCount; poolIndex--; )
6724 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6728 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6730 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6732 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6734 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6742 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6744 if(hAllocation->CanBecomeLost())
6750 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6751 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6754 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6758 pAllocationInfo->
offset = 0;
6759 pAllocationInfo->
size = hAllocation->GetSize();
6761 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6764 else if(localLastUseFrameIndex == localCurrFrameIndex)
6766 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6767 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6768 pAllocationInfo->
offset = hAllocation->GetOffset();
6769 pAllocationInfo->
size = hAllocation->GetSize();
6770 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6771 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6776 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6778 localLastUseFrameIndex = localCurrFrameIndex;
6786 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6787 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6788 pAllocationInfo->
offset = hAllocation->GetOffset();
6789 pAllocationInfo->
size = hAllocation->GetSize();
6790 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6791 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6795 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6797 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6810 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6812 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6813 if(res != VK_SUCCESS)
6815 vma_delete(
this, *pPool);
6822 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6823 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6829 void VmaAllocator_T::DestroyPool(VmaPool pool)
6833 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6834 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6835 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6838 vma_delete(
this, pool);
6841 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6843 pool->m_BlockVector.GetPoolStats(pPoolStats);
6846 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6848 m_CurrentFrameIndex.store(frameIndex);
6851 void VmaAllocator_T::MakePoolAllocationsLost(
6853 size_t* pLostAllocationCount)
6855 hPool->m_BlockVector.MakePoolAllocationsLost(
6856 m_CurrentFrameIndex.load(),
6857 pLostAllocationCount);
6860 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6862 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6863 (*pAllocation)->InitLost();
6866 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6868 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6871 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6873 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6874 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6876 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6877 if(res == VK_SUCCESS)
6879 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
6884 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6889 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6892 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6894 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
6900 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6902 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6904 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6907 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
6909 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
6910 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6912 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6913 m_HeapSizeLimit[heapIndex] += size;
6917 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6919 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6921 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6923 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6924 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
6925 VMA_ASSERT(pOwnAllocations);
6926 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
6927 VMA_ASSERT(success);
6930 VkDeviceMemory hMemory = allocation->GetMemory();
6932 if(allocation->GetMappedData() != VMA_NULL)
6934 vkUnmapMemory(m_hDevice, hMemory);
6937 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
6939 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
6942 #if VMA_STATS_STRING_ENABLED 6944 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
6946 bool ownAllocationsStarted =
false;
6947 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6949 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6950 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6952 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6953 VMA_ASSERT(pOwnAllocVector);
6954 if(pOwnAllocVector->empty() ==
false)
6956 if(ownAllocationsStarted ==
false)
6958 ownAllocationsStarted =
true;
6959 json.WriteString(
"OwnAllocations");
6963 json.BeginString(
"Type ");
6964 json.ContinueString(memTypeIndex);
6965 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6967 json.ContinueString(
" Mapped");
6973 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
6975 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
6976 json.BeginObject(
true);
6978 json.WriteString(
"Size");
6979 json.WriteNumber(hAlloc->GetSize());
6981 json.WriteString(
"Type");
6982 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
6991 if(ownAllocationsStarted)
6997 bool allocationsStarted =
false;
6998 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7000 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7002 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7004 if(allocationsStarted ==
false)
7006 allocationsStarted =
true;
7007 json.WriteString(
"DefaultPools");
7011 json.BeginString(
"Type ");
7012 json.ContinueString(memTypeIndex);
7013 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7015 json.ContinueString(
" Mapped");
7019 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7023 if(allocationsStarted)
7030 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7031 const size_t poolCount = m_Pools.size();
7034 json.WriteString(
"Pools");
7036 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7038 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7045 #endif // #if VMA_STATS_STRING_ENABLED 7047 static VkResult AllocateMemoryForImage(
7048 VmaAllocator allocator,
7051 VmaSuballocationType suballocType,
7052 VmaAllocation* pAllocation)
7054 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7056 VkMemoryRequirements vkMemReq = {};
7057 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7059 return allocator->AllocateMemory(
7061 *pAllocationCreateInfo,
7071 VmaAllocator* pAllocator)
7073 VMA_ASSERT(pCreateInfo && pAllocator);
7074 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7080 VmaAllocator allocator)
7082 if(allocator != VK_NULL_HANDLE)
7084 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7085 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7086 vma_delete(&allocationCallbacks, allocator);
7091 VmaAllocator allocator,
7092 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7094 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7095 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7099 VmaAllocator allocator,
7100 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7102 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7103 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7107 VmaAllocator allocator,
7108 uint32_t memoryTypeIndex,
7109 VkMemoryPropertyFlags* pFlags)
7111 VMA_ASSERT(allocator && pFlags);
7112 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7113 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7117 VmaAllocator allocator,
7118 uint32_t frameIndex)
7120 VMA_ASSERT(allocator);
7121 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7123 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7125 allocator->SetCurrentFrameIndex(frameIndex);
7129 VmaAllocator allocator,
7132 VMA_ASSERT(allocator && pStats);
7133 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7134 allocator->CalculateStats(pStats);
7137 #if VMA_STATS_STRING_ENABLED 7140 VmaAllocator allocator,
7141 char** ppStatsString,
7142 VkBool32 detailedMap)
7144 VMA_ASSERT(allocator && ppStatsString);
7145 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7147 VmaStringBuilder sb(allocator);
7149 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7153 allocator->CalculateStats(&stats);
7155 json.WriteString(
"Total");
7156 VmaPrintStatInfo(json, stats.
total);
7158 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7160 json.BeginString(
"Heap ");
7161 json.ContinueString(heapIndex);
7165 json.WriteString(
"Size");
7166 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7168 json.WriteString(
"Flags");
7169 json.BeginArray(
true);
7170 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7172 json.WriteString(
"DEVICE_LOCAL");
7178 json.WriteString(
"Stats");
7179 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7182 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7184 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7186 json.BeginString(
"Type ");
7187 json.ContinueString(typeIndex);
7192 json.WriteString(
"Flags");
7193 json.BeginArray(
true);
7194 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7195 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7197 json.WriteString(
"DEVICE_LOCAL");
7199 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7201 json.WriteString(
"HOST_VISIBLE");
7203 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7205 json.WriteString(
"HOST_COHERENT");
7207 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7209 json.WriteString(
"HOST_CACHED");
7211 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7213 json.WriteString(
"LAZILY_ALLOCATED");
7219 json.WriteString(
"Stats");
7220 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7229 if(detailedMap == VK_TRUE)
7231 allocator->PrintDetailedMap(json);
7237 const size_t len = sb.GetLength();
7238 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7241 memcpy(pChars, sb.GetData(), len);
7244 *ppStatsString = pChars;
7248 VmaAllocator allocator,
7251 if(pStatsString != VMA_NULL)
7253 VMA_ASSERT(allocator);
7254 size_t len = strlen(pStatsString);
7255 vma_delete_array(allocator, pStatsString, len + 1);
7259 #endif // #if VMA_STATS_STRING_ENABLED 7264 VmaAllocator allocator,
7265 uint32_t memoryTypeBits,
7267 uint32_t* pMemoryTypeIndex)
7269 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7270 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7271 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7273 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
7275 if(preferredFlags == 0)
7277 preferredFlags = requiredFlags;
7280 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
7283 switch(pAllocationCreateInfo->
usage)
7288 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7291 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7294 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7295 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7298 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7299 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7307 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7310 *pMemoryTypeIndex = UINT32_MAX;
7311 uint32_t minCost = UINT32_MAX;
7312 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7313 memTypeIndex < allocator->GetMemoryTypeCount();
7314 ++memTypeIndex, memTypeBit <<= 1)
7317 if((memTypeBit & memoryTypeBits) != 0)
7319 const VkMemoryPropertyFlags currFlags =
7320 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7322 if((requiredFlags & ~currFlags) == 0)
7325 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7327 if(currCost < minCost)
7329 *pMemoryTypeIndex = memTypeIndex;
7339 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7343 VmaAllocator allocator,
7347 VMA_ASSERT(allocator && pCreateInfo && pPool);
7349 VMA_DEBUG_LOG(
"vmaCreatePool");
7351 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7353 return allocator->CreatePool(pCreateInfo, pPool);
7357 VmaAllocator allocator,
7360 VMA_ASSERT(allocator && pool);
7362 VMA_DEBUG_LOG(
"vmaDestroyPool");
7364 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7366 allocator->DestroyPool(pool);
7370 VmaAllocator allocator,
7374 VMA_ASSERT(allocator && pool && pPoolStats);
7376 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7378 allocator->GetPoolStats(pool, pPoolStats);
7382 VmaAllocator allocator,
7384 size_t* pLostAllocationCount)
7386 VMA_ASSERT(allocator && pool);
7388 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7390 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7394 VmaAllocator allocator,
7395 const VkMemoryRequirements* pVkMemoryRequirements,
7397 VmaAllocation* pAllocation,
7400 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7402 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7404 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7406 VkResult result = allocator->AllocateMemory(
7407 *pVkMemoryRequirements,
7409 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7412 if(pAllocationInfo && result == VK_SUCCESS)
7414 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7421 VmaAllocator allocator,
7424 VmaAllocation* pAllocation,
7427 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7429 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7431 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7433 VkMemoryRequirements vkMemReq = {};
7434 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7436 VkResult result = allocator->AllocateMemory(
7439 VMA_SUBALLOCATION_TYPE_BUFFER,
7442 if(pAllocationInfo && result == VK_SUCCESS)
7444 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7451 VmaAllocator allocator,
7454 VmaAllocation* pAllocation,
7457 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7459 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7461 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7463 VkResult result = AllocateMemoryForImage(
7467 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7470 if(pAllocationInfo && result == VK_SUCCESS)
7472 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7479 VmaAllocator allocator,
7480 VmaAllocation allocation)
7482 VMA_ASSERT(allocator && allocation);
7484 VMA_DEBUG_LOG(
"vmaFreeMemory");
7486 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7488 allocator->FreeMemory(allocation);
7492 VmaAllocator allocator,
7493 VmaAllocation allocation,
7496 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7498 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7500 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7504 VmaAllocator allocator,
7505 VmaAllocation allocation,
7508 VMA_ASSERT(allocator && allocation);
7510 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7512 allocation->SetUserData(pUserData);
7516 VmaAllocator allocator,
7517 VmaAllocation* pAllocation)
7519 VMA_ASSERT(allocator && pAllocation);
7521 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7523 allocator->CreateLostAllocation(pAllocation);
7527 VmaAllocator allocator,
7528 VmaAllocation allocation,
7531 VMA_ASSERT(allocator && allocation && ppData);
7533 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7535 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7536 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
7540 VmaAllocator allocator,
7541 VmaAllocation allocation)
7543 VMA_ASSERT(allocator && allocation);
7545 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7547 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
7552 VMA_ASSERT(allocator);
7554 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7556 allocator->UnmapPersistentlyMappedMemory();
7561 VMA_ASSERT(allocator);
7563 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7565 return allocator->MapPersistentlyMappedMemory();
7569 VmaAllocator allocator,
7570 VmaAllocation* pAllocations,
7571 size_t allocationCount,
7572 VkBool32* pAllocationsChanged,
7576 VMA_ASSERT(allocator && pAllocations);
7578 VMA_DEBUG_LOG(
"vmaDefragment");
7580 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7582 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7586 VmaAllocator allocator,
7587 const VkBufferCreateInfo* pBufferCreateInfo,
7590 VmaAllocation* pAllocation,
7593 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7595 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7599 *pBuffer = VK_NULL_HANDLE;
7600 *pAllocation = VK_NULL_HANDLE;
7603 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7604 allocator->m_hDevice,
7606 allocator->GetAllocationCallbacks(),
7611 VkMemoryRequirements vkMemReq = {};
7612 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7615 res = allocator->AllocateMemory(
7617 *pAllocationCreateInfo,
7618 VMA_SUBALLOCATION_TYPE_BUFFER,
7623 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7624 allocator->m_hDevice,
7626 (*pAllocation)->GetMemory(),
7627 (*pAllocation)->GetOffset());
7631 if(pAllocationInfo != VMA_NULL)
7633 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7637 allocator->FreeMemory(*pAllocation);
7638 *pAllocation = VK_NULL_HANDLE;
7641 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7642 *pBuffer = VK_NULL_HANDLE;
7649 VmaAllocator allocator,
7651 VmaAllocation allocation)
7653 if(buffer != VK_NULL_HANDLE)
7655 VMA_ASSERT(allocator);
7657 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7659 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7661 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7663 allocator->FreeMemory(allocation);
7668 VmaAllocator allocator,
7669 const VkImageCreateInfo* pImageCreateInfo,
7672 VmaAllocation* pAllocation,
7675 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7677 VMA_DEBUG_LOG(
"vmaCreateImage");
7679 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7681 *pImage = VK_NULL_HANDLE;
7682 *pAllocation = VK_NULL_HANDLE;
7685 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7686 allocator->m_hDevice,
7688 allocator->GetAllocationCallbacks(),
7692 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7693 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7694 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7697 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7701 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7702 allocator->m_hDevice,
7704 (*pAllocation)->GetMemory(),
7705 (*pAllocation)->GetOffset());
7709 if(pAllocationInfo != VMA_NULL)
7711 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7715 allocator->FreeMemory(*pAllocation);
7716 *pAllocation = VK_NULL_HANDLE;
7719 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7720 *pImage = VK_NULL_HANDLE;
7727 VmaAllocator allocator,
7729 VmaAllocation allocation)
7731 if(image != VK_NULL_HANDLE)
7733 VMA_ASSERT(allocator);
7735 VMA_DEBUG_LOG(
"vmaDestroyImage");
7737 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7739 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7741 allocator->FreeMemory(allocation);
7745 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:476
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:499
Definition: vk_mem_alloc.h:828
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:612
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:486
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:679
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:480
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:949
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1099
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:880
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:728
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:761
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:445
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:511
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:830
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:558
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:493
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:508
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:483
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:473
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1103
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:528
VmaStatInfo total
Definition: vk_mem_alloc.h:630
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1111
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:744
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1094
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:484
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:502
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:834
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:959
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:481
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:763
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:850
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:886
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:837
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
struct VmaVulkanFunctions VmaVulkanFunctions
Definition: vk_mem_alloc.h:737
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1089
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:621
Definition: vk_mem_alloc.h:808
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1107
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:482
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:626
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:717
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1109
VmaMemoryUsage
Definition: vk_mem_alloc.h:665
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:755
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:469
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:464
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:477
Definition: vk_mem_alloc.h:609
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:845
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:456
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:460
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:840
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:622
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:439
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:750
Definition: vk_mem_alloc.h:741
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:479
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:858
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:621
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:514
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:889
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:768
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:546
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:628
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:621
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:488
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:614
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:458
Definition: vk_mem_alloc.h:735
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:487
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:872
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:496
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:618
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:970
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:696
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:505
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:616
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:877
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:673
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:622
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:954
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1105
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Definition: vk_mem_alloc.h:475
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:739
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:485
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:489
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:799
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:965
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
No intended memory usage specified.
Definition: vk_mem_alloc.h:668
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:478
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
Definition: vk_mem_alloc.h:680
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:935
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:676
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:684
Definition: vk_mem_alloc.h:471
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:707
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:670
struct VmaStatInfo VmaStatInfo
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:620
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:629
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:883
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:826
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:622
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:940
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.