23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 420 #include <vulkan/vulkan.h> 427 VK_DEFINE_HANDLE(VmaAllocator)
431 VmaAllocator allocator,
433 VkDeviceMemory memory,
437 VmaAllocator allocator,
439 VkDeviceMemory memory,
526 VmaAllocator* pAllocator);
530 VmaAllocator allocator);
537 VmaAllocator allocator,
538 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
545 VmaAllocator allocator,
546 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
555 VmaAllocator allocator,
556 uint32_t memoryTypeIndex,
557 VkMemoryPropertyFlags* pFlags);
568 VmaAllocator allocator,
569 uint32_t frameIndex);
597 VmaAllocator allocator,
600 #define VMA_STATS_STRING_ENABLED 1 602 #if VMA_STATS_STRING_ENABLED 608 VmaAllocator allocator,
609 char** ppStatsString,
610 VkBool32 detailedMap);
613 VmaAllocator allocator,
616 #endif // #if VMA_STATS_STRING_ENABLED 625 VK_DEFINE_HANDLE(VmaPool)
748 VmaAllocator allocator,
749 uint32_t memoryTypeBits,
751 uint32_t* pMemoryTypeIndex);
861 VmaAllocator allocator,
868 VmaAllocator allocator,
878 VmaAllocator allocator,
889 VmaAllocator allocator,
891 size_t* pLostAllocationCount);
893 VK_DEFINE_HANDLE(VmaAllocation)
946 VmaAllocator allocator,
947 const VkMemoryRequirements* pVkMemoryRequirements,
949 VmaAllocation* pAllocation,
959 VmaAllocator allocator,
962 VmaAllocation* pAllocation,
967 VmaAllocator allocator,
970 VmaAllocation* pAllocation,
975 VmaAllocator allocator,
976 VmaAllocation allocation);
980 VmaAllocator allocator,
981 VmaAllocation allocation,
986 VmaAllocator allocator,
987 VmaAllocation allocation,
1001 VmaAllocator allocator,
1002 VmaAllocation* pAllocation);
1013 VmaAllocator allocator,
1014 VmaAllocation allocation,
1018 VmaAllocator allocator,
1019 VmaAllocation allocation);
1147 VmaAllocator allocator,
1148 VmaAllocation* pAllocations,
1149 size_t allocationCount,
1150 VkBool32* pAllocationsChanged,
1180 VmaAllocator allocator,
1181 const VkBufferCreateInfo* pBufferCreateInfo,
1184 VmaAllocation* pAllocation,
1188 VmaAllocator allocator,
1190 VmaAllocation allocation);
1194 VmaAllocator allocator,
1195 const VkImageCreateInfo* pImageCreateInfo,
1198 VmaAllocation* pAllocation,
1202 VmaAllocator allocator,
1204 VmaAllocation allocation);
1208 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1211 #ifdef __INTELLISENSE__ 1212 #define VMA_IMPLEMENTATION 1215 #ifdef VMA_IMPLEMENTATION 1216 #undef VMA_IMPLEMENTATION 1238 #if VMA_USE_STL_CONTAINERS 1239 #define VMA_USE_STL_VECTOR 1 1240 #define VMA_USE_STL_UNORDERED_MAP 1 1241 #define VMA_USE_STL_LIST 1 1244 #if VMA_USE_STL_VECTOR 1248 #if VMA_USE_STL_UNORDERED_MAP 1249 #include <unordered_map> 1252 #if VMA_USE_STL_LIST 1261 #include <algorithm> 1265 #if !defined(_WIN32) 1272 #define VMA_ASSERT(expr) assert(expr) 1274 #define VMA_ASSERT(expr) 1280 #ifndef VMA_HEAVY_ASSERT 1282 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1284 #define VMA_HEAVY_ASSERT(expr) 1290 #define VMA_NULL nullptr 1293 #ifndef VMA_ALIGN_OF 1294 #define VMA_ALIGN_OF(type) (__alignof(type)) 1297 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1299 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1301 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1305 #ifndef VMA_SYSTEM_FREE 1307 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1309 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1314 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1318 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1322 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1326 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1329 #ifndef VMA_DEBUG_LOG 1330 #define VMA_DEBUG_LOG(format, ...) 1340 #if VMA_STATS_STRING_ENABLED 1341 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1343 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1345 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1347 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1349 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1351 snprintf(outStr, strLen,
"%p", ptr);
1361 void Lock() { m_Mutex.lock(); }
1362 void Unlock() { m_Mutex.unlock(); }
1366 #define VMA_MUTEX VmaMutex 1377 #ifndef VMA_ATOMIC_UINT32 1378 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1381 #ifndef VMA_BEST_FIT 1394 #define VMA_BEST_FIT (1) 1397 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1402 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1405 #ifndef VMA_DEBUG_ALIGNMENT 1410 #define VMA_DEBUG_ALIGNMENT (1) 1413 #ifndef VMA_DEBUG_MARGIN 1418 #define VMA_DEBUG_MARGIN (0) 1421 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1426 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1429 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1434 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1437 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1438 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1442 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1443 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1447 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1448 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1452 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1458 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1459 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic parallel bit-counting trick. The final "return c;" was missing,
// which is undefined behavior in a value-returning function.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Rounds val up to the nearest multiple of align.
// align must be non-zero; val + align - 1 must not overflow T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T overshoot = val + align - 1;
    return (overshoot / align) * align;
}
// Integer division with rounding to nearest (halves round up for
// non-negative operands): VmaRoundDiv(7, 2) == 4.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Lomuto-style partition for VmaQuickSort: uses the last element as the
// pivot and returns the iterator at the pivot's final position.
// The "++insertIndex;" advance and the "return insertIndex;" statement had
// been dropped, leaving the partition broken and the function without a return.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1512 template<
typename Iterator,
typename Compare>
1513 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1517 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1518 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1519 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1523 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1525 #endif // #ifndef VMA_SORT 1534 static inline bool VmaBlocksOnSamePage(
1535 VkDeviceSize resourceAOffset,
1536 VkDeviceSize resourceASize,
1537 VkDeviceSize resourceBOffset,
1538 VkDeviceSize pageSize)
1540 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1541 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1542 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1543 VkDeviceSize resourceBStart = resourceBOffset;
1544 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1545 return resourceAEndPage == resourceBStartPage;
// Categorizes what occupies a suballocation, used to decide whether two
// neighboring suballocations conflict under bufferImageGranularity rules.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,   // Image tiling not known yet.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1565 static inline bool VmaIsBufferImageGranularityConflict(
1566 VmaSuballocationType suballocType1,
1567 VmaSuballocationType suballocType2)
1569 if(suballocType1 > suballocType2)
1571 VMA_SWAP(suballocType1, suballocType2);
1574 switch(suballocType1)
1576 case VMA_SUBALLOCATION_TYPE_FREE:
1578 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1580 case VMA_SUBALLOCATION_TYPE_BUFFER:
1582 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1583 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1584 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1586 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1587 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1588 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1589 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1591 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1592 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1604 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1605 m_pMutex(useMutex ? &mutex : VMA_NULL)
1622 VMA_MUTEX* m_pMutex;
1625 #if VMA_DEBUG_GLOBAL_MUTEX 1626 static VMA_MUTEX gDebugGlobalMutex;
1627 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1629 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1633 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over sorted range [beg, end): returns an iterator to the first
element for which cmp(element, key) is false (i.e. the first element not less
than key), or end if all elements are less. Equivalent to std::lower_bound.
The search loop body (while, the else branch and the final return) had been
dropped; restored here.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
1666 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1668 if((pAllocationCallbacks != VMA_NULL) &&
1669 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1671 return (*pAllocationCallbacks->pfnAllocation)(
1672 pAllocationCallbacks->pUserData,
1675 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1679 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1683 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1685 if((pAllocationCallbacks != VMA_NULL) &&
1686 (pAllocationCallbacks->pfnFree != VMA_NULL))
1688 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1692 VMA_SYSTEM_FREE(ptr);
1696 template<
typename T>
1697 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1699 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1702 template<
typename T>
1703 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1705 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1708 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1710 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1712 template<
typename T>
1713 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1716 VmaFree(pAllocationCallbacks, ptr);
1719 template<
typename T>
1720 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1724 for(
size_t i = count; i--; )
1728 VmaFree(pAllocationCallbacks, ptr);
1733 template<
typename T>
1734 class VmaStlAllocator
1737 const VkAllocationCallbacks*
const m_pCallbacks;
1738 typedef T value_type;
1740 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1741 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1743 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1744 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1746 template<
typename U>
1747 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1749 return m_pCallbacks == rhs.m_pCallbacks;
1751 template<
typename U>
1752 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1754 return m_pCallbacks != rhs.m_pCallbacks;
1757 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1760 #if VMA_USE_STL_VECTOR 1762 #define VmaVector std::vector 1764 template<
typename T,
typename allocatorT>
1765 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1767 vec.insert(vec.begin() + index, item);
1770 template<
typename T,
typename allocatorT>
1771 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
1773 vec.erase(vec.begin() + index);
1776 #else // #if VMA_USE_STL_VECTOR 1781 template<
typename T,
typename AllocatorT>
1785 typedef T value_type;
1787 VmaVector(
const AllocatorT& allocator) :
1788 m_Allocator(allocator),
1795 VmaVector(
size_t count,
const AllocatorT& allocator) :
1796 m_Allocator(allocator),
1797 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1803 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1804 m_Allocator(src.m_Allocator),
1805 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1806 m_Count(src.m_Count),
1807 m_Capacity(src.m_Count)
1811 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1817 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1820 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1824 resize(rhs.m_Count);
1827 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1833 bool empty()
const {
return m_Count == 0; }
1834 size_t size()
const {
return m_Count; }
1835 T* data() {
return m_pArray; }
1836 const T* data()
const {
return m_pArray; }
1838 T& operator[](
size_t index)
1840 VMA_HEAVY_ASSERT(index < m_Count);
1841 return m_pArray[index];
1843 const T& operator[](
size_t index)
const 1845 VMA_HEAVY_ASSERT(index < m_Count);
1846 return m_pArray[index];
1851 VMA_HEAVY_ASSERT(m_Count > 0);
1854 const T& front()
const 1856 VMA_HEAVY_ASSERT(m_Count > 0);
1861 VMA_HEAVY_ASSERT(m_Count > 0);
1862 return m_pArray[m_Count - 1];
1864 const T& back()
const 1866 VMA_HEAVY_ASSERT(m_Count > 0);
1867 return m_pArray[m_Count - 1];
1870 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1872 newCapacity = VMA_MAX(newCapacity, m_Count);
1874 if((newCapacity < m_Capacity) && !freeMemory)
1876 newCapacity = m_Capacity;
1879 if(newCapacity != m_Capacity)
1881 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1884 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1886 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1887 m_Capacity = newCapacity;
1888 m_pArray = newArray;
1892 void resize(
size_t newCount,
bool freeMemory =
false)
1894 size_t newCapacity = m_Capacity;
1895 if(newCount > m_Capacity)
1897 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1901 newCapacity = newCount;
1904 if(newCapacity != m_Capacity)
1906 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1907 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1908 if(elementsToCopy != 0)
1910 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1912 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1913 m_Capacity = newCapacity;
1914 m_pArray = newArray;
1920 void clear(
bool freeMemory =
false)
1922 resize(0, freeMemory);
1925 void insert(
size_t index,
const T& src)
1927 VMA_HEAVY_ASSERT(index <= m_Count);
1928 const size_t oldCount = size();
1929 resize(oldCount + 1);
1930 if(index < oldCount)
1932 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1934 m_pArray[index] = src;
1937 void remove(
size_t index)
1939 VMA_HEAVY_ASSERT(index < m_Count);
1940 const size_t oldCount = size();
1941 if(index < oldCount - 1)
1943 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1945 resize(oldCount - 1);
1948 void push_back(
const T& src)
1950 const size_t newIndex = size();
1951 resize(newIndex + 1);
1952 m_pArray[newIndex] = src;
1957 VMA_HEAVY_ASSERT(m_Count > 0);
1961 void push_front(
const T& src)
1968 VMA_HEAVY_ASSERT(m_Count > 0);
1972 typedef T* iterator;
1974 iterator begin() {
return m_pArray; }
1975 iterator end() {
return m_pArray + m_Count; }
1978 AllocatorT m_Allocator;
1984 template<
typename T,
typename allocatorT>
1985 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
1987 vec.insert(index, item);
1990 template<
typename T,
typename allocatorT>
1991 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
1996 #endif // #if VMA_USE_STL_VECTOR 1998 template<
typename CmpLess,
typename VectorT>
1999 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2001 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2003 vector.data() + vector.size(),
2005 CmpLess()) - vector.data();
2006 VmaVectorInsert(vector, indexToInsert, value);
2007 return indexToInsert;
2010 template<
typename CmpLess,
typename VectorT>
2011 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2014 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2016 vector.data() + vector.size(),
2019 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2021 size_t indexToRemove = it - vector.begin();
2022 VmaVectorRemove(vector, indexToRemove);
2028 template<
typename CmpLess,
typename VectorT>
2029 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2032 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2034 vector.data() + vector.size(),
2037 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2039 return it - vector.begin();
2043 return vector.size();
2055 template<
typename T>
2056 class VmaPoolAllocator
2059 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2060 ~VmaPoolAllocator();
2068 uint32_t NextFreeIndex;
2075 uint32_t FirstFreeIndex;
2078 const VkAllocationCallbacks* m_pAllocationCallbacks;
2079 size_t m_ItemsPerBlock;
2080 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2082 ItemBlock& CreateNewBlock();
2085 template<
typename T>
2086 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2087 m_pAllocationCallbacks(pAllocationCallbacks),
2088 m_ItemsPerBlock(itemsPerBlock),
2089 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2091 VMA_ASSERT(itemsPerBlock > 0);
2094 template<
typename T>
2095 VmaPoolAllocator<T>::~VmaPoolAllocator()
2100 template<
typename T>
2101 void VmaPoolAllocator<T>::Clear()
2103 for(
size_t i = m_ItemBlocks.size(); i--; )
2104 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2105 m_ItemBlocks.clear();
2108 template<
typename T>
2109 T* VmaPoolAllocator<T>::Alloc()
2111 for(
size_t i = m_ItemBlocks.size(); i--; )
2113 ItemBlock& block = m_ItemBlocks[i];
2115 if(block.FirstFreeIndex != UINT32_MAX)
2117 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2118 block.FirstFreeIndex = pItem->NextFreeIndex;
2119 return &pItem->Value;
2124 ItemBlock& newBlock = CreateNewBlock();
2125 Item*
const pItem = &newBlock.pItems[0];
2126 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2127 return &pItem->Value;
2130 template<
typename T>
2131 void VmaPoolAllocator<T>::Free(T* ptr)
2134 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2136 ItemBlock& block = m_ItemBlocks[i];
2140 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2143 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2145 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2146 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2147 block.FirstFreeIndex = index;
2151 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2154 template<
typename T>
2155 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2157 ItemBlock newBlock = {
2158 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2160 m_ItemBlocks.push_back(newBlock);
2163 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2164 newBlock.pItems[i].NextFreeIndex = i + 1;
2165 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2166 return m_ItemBlocks.back();
2172 #if VMA_USE_STL_LIST 2174 #define VmaList std::list 2176 #else // #if VMA_USE_STL_LIST 2178 template<
typename T>
2187 template<
typename T>
2191 typedef VmaListItem<T> ItemType;
2193 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2197 size_t GetCount()
const {
return m_Count; }
2198 bool IsEmpty()
const {
return m_Count == 0; }
2200 ItemType* Front() {
return m_pFront; }
2201 const ItemType* Front()
const {
return m_pFront; }
2202 ItemType* Back() {
return m_pBack; }
2203 const ItemType* Back()
const {
return m_pBack; }
2205 ItemType* PushBack();
2206 ItemType* PushFront();
2207 ItemType* PushBack(
const T& value);
2208 ItemType* PushFront(
const T& value);
2213 ItemType* InsertBefore(ItemType* pItem);
2215 ItemType* InsertAfter(ItemType* pItem);
2217 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2218 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2220 void Remove(ItemType* pItem);
2223 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2224 VmaPoolAllocator<ItemType> m_ItemAllocator;
2230 VmaRawList(
const VmaRawList<T>& src);
2231 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2234 template<
typename T>
2235 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2236 m_pAllocationCallbacks(pAllocationCallbacks),
2237 m_ItemAllocator(pAllocationCallbacks, 128),
2244 template<
typename T>
2245 VmaRawList<T>::~VmaRawList()
2251 template<
typename T>
2252 void VmaRawList<T>::Clear()
2254 if(IsEmpty() ==
false)
2256 ItemType* pItem = m_pBack;
2257 while(pItem != VMA_NULL)
2259 ItemType*
const pPrevItem = pItem->pPrev;
2260 m_ItemAllocator.Free(pItem);
2263 m_pFront = VMA_NULL;
2269 template<
typename T>
2270 VmaListItem<T>* VmaRawList<T>::PushBack()
2272 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2273 pNewItem->pNext = VMA_NULL;
2276 pNewItem->pPrev = VMA_NULL;
2277 m_pFront = pNewItem;
2283 pNewItem->pPrev = m_pBack;
2284 m_pBack->pNext = pNewItem;
2291 template<
typename T>
2292 VmaListItem<T>* VmaRawList<T>::PushFront()
2294 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2295 pNewItem->pPrev = VMA_NULL;
2298 pNewItem->pNext = VMA_NULL;
2299 m_pFront = pNewItem;
2305 pNewItem->pNext = m_pFront;
2306 m_pFront->pPrev = pNewItem;
2307 m_pFront = pNewItem;
2313 template<
typename T>
2314 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2316 ItemType*
const pNewItem = PushBack();
2317 pNewItem->Value = value;
2321 template<
typename T>
2322 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2324 ItemType*
const pNewItem = PushFront();
2325 pNewItem->Value = value;
2329 template<
typename T>
2330 void VmaRawList<T>::PopBack()
2332 VMA_HEAVY_ASSERT(m_Count > 0);
2333 ItemType*
const pBackItem = m_pBack;
2334 ItemType*
const pPrevItem = pBackItem->pPrev;
2335 if(pPrevItem != VMA_NULL)
2337 pPrevItem->pNext = VMA_NULL;
2339 m_pBack = pPrevItem;
2340 m_ItemAllocator.Free(pBackItem);
2344 template<
typename T>
2345 void VmaRawList<T>::PopFront()
2347 VMA_HEAVY_ASSERT(m_Count > 0);
2348 ItemType*
const pFrontItem = m_pFront;
2349 ItemType*
const pNextItem = pFrontItem->pNext;
2350 if(pNextItem != VMA_NULL)
2352 pNextItem->pPrev = VMA_NULL;
2354 m_pFront = pNextItem;
2355 m_ItemAllocator.Free(pFrontItem);
2359 template<
typename T>
2360 void VmaRawList<T>::Remove(ItemType* pItem)
2362 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2363 VMA_HEAVY_ASSERT(m_Count > 0);
2365 if(pItem->pPrev != VMA_NULL)
2367 pItem->pPrev->pNext = pItem->pNext;
2371 VMA_HEAVY_ASSERT(m_pFront == pItem);
2372 m_pFront = pItem->pNext;
2375 if(pItem->pNext != VMA_NULL)
2377 pItem->pNext->pPrev = pItem->pPrev;
2381 VMA_HEAVY_ASSERT(m_pBack == pItem);
2382 m_pBack = pItem->pPrev;
2385 m_ItemAllocator.Free(pItem);
2389 template<
typename T>
2390 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2392 if(pItem != VMA_NULL)
2394 ItemType*
const prevItem = pItem->pPrev;
2395 ItemType*
const newItem = m_ItemAllocator.Alloc();
2396 newItem->pPrev = prevItem;
2397 newItem->pNext = pItem;
2398 pItem->pPrev = newItem;
2399 if(prevItem != VMA_NULL)
2401 prevItem->pNext = newItem;
2405 VMA_HEAVY_ASSERT(m_pFront == pItem);
2415 template<
typename T>
2416 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2418 if(pItem != VMA_NULL)
2420 ItemType*
const nextItem = pItem->pNext;
2421 ItemType*
const newItem = m_ItemAllocator.Alloc();
2422 newItem->pNext = nextItem;
2423 newItem->pPrev = pItem;
2424 pItem->pNext = newItem;
2425 if(nextItem != VMA_NULL)
2427 nextItem->pPrev = newItem;
2431 VMA_HEAVY_ASSERT(m_pBack == pItem);
2441 template<
typename T>
2442 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2444 ItemType*
const newItem = InsertBefore(pItem);
2445 newItem->Value = value;
2449 template<
typename T>
2450 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2452 ItemType*
const newItem = InsertAfter(pItem);
2453 newItem->Value = value;
2457 template<
typename T,
typename AllocatorT>
2470 T& operator*()
const 2472 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2473 return m_pItem->Value;
2475 T* operator->()
const 2477 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2478 return &m_pItem->Value;
2481 iterator& operator++()
2483 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2484 m_pItem = m_pItem->pNext;
2487 iterator& operator--()
2489 if(m_pItem != VMA_NULL)
2491 m_pItem = m_pItem->pPrev;
2495 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2496 m_pItem = m_pList->Back();
2501 iterator operator++(
int)
2503 iterator result = *
this;
2507 iterator operator--(
int)
2509 iterator result = *
this;
2514 bool operator==(
const iterator& rhs)
const 2516 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2517 return m_pItem == rhs.m_pItem;
2519 bool operator!=(
const iterator& rhs)
const 2521 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2522 return m_pItem != rhs.m_pItem;
2526 VmaRawList<T>* m_pList;
2527 VmaListItem<T>* m_pItem;
2529 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2535 friend class VmaList<T, AllocatorT>;
2538 class const_iterator
2547 const_iterator(
const iterator& src) :
2548 m_pList(src.m_pList),
2549 m_pItem(src.m_pItem)
2553 const T& operator*()
const 2555 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2556 return m_pItem->Value;
2558 const T* operator->()
const 2560 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2561 return &m_pItem->Value;
2564 const_iterator& operator++()
2566 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2567 m_pItem = m_pItem->pNext;
2570 const_iterator& operator--()
2572 if(m_pItem != VMA_NULL)
2574 m_pItem = m_pItem->pPrev;
2578 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2579 m_pItem = m_pList->Back();
2584 const_iterator operator++(
int)
2586 const_iterator result = *
this;
2590 const_iterator operator--(
int)
2592 const_iterator result = *
this;
2597 bool operator==(
const const_iterator& rhs)
const 2599 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2600 return m_pItem == rhs.m_pItem;
2602 bool operator!=(
const const_iterator& rhs)
const 2604 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2605 return m_pItem != rhs.m_pItem;
2609 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2615 const VmaRawList<T>* m_pList;
2616 const VmaListItem<T>* m_pItem;
2618 friend class VmaList<T, AllocatorT>;
2621 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2623 bool empty()
const {
return m_RawList.IsEmpty(); }
2624 size_t size()
const {
return m_RawList.GetCount(); }
2626 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2627 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2629 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2630 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2632 void clear() { m_RawList.Clear(); }
2633 void push_back(
const T& value) { m_RawList.PushBack(value); }
2634 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2635 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2638 VmaRawList<T> m_RawList;
2641 #endif // #if VMA_USE_STL_LIST 2649 #if VMA_USE_STL_UNORDERED_MAP 2651 #define VmaPair std::pair 2653 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2654 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2656 #else // #if VMA_USE_STL_UNORDERED_MAP 2658 template<
typename T1,
typename T2>
2664 VmaPair() : first(), second() { }
2665 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2671 template<
typename KeyT,
typename ValueT>
2675 typedef VmaPair<KeyT, ValueT> PairType;
2676 typedef PairType* iterator;
2678 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2680 iterator begin() {
return m_Vector.begin(); }
2681 iterator end() {
return m_Vector.end(); }
2683 void insert(
const PairType& pair);
2684 iterator find(
const KeyT& key);
2685 void erase(iterator it);
2688 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2691 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2693 template<
typename FirstT,
typename SecondT>
2694 struct VmaPairFirstLess
2696 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2698 return lhs.first < rhs.first;
2700 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2702 return lhs.first < rhsFirst;
2706 template<
typename KeyT,
typename ValueT>
2707 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2709 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2711 m_Vector.data() + m_Vector.size(),
2713 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2714 VmaVectorInsert(m_Vector, indexToInsert, pair);
2717 template<
typename KeyT,
typename ValueT>
2718 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2720 PairType* it = VmaBinaryFindFirstNotLess(
2722 m_Vector.data() + m_Vector.size(),
2724 VmaPairFirstLess<KeyT, ValueT>());
2725 if((it != m_Vector.end()) && (it->first == key))
2731 return m_Vector.end();
2735 template<
typename KeyT,
typename ValueT>
2736 void VmaMap<KeyT, ValueT>::erase(iterator it)
2738 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2741 #endif // #if VMA_USE_STL_UNORDERED_MAP 2747 class VmaDeviceMemoryBlock;
// Distinguishes the two parallel vectors of memory blocks kept per memory
// type: one for persistently mapped blocks, one for unmapped blocks.
enum VMA_BLOCK_VECTOR_TYPE
{
    VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
    VMA_BLOCK_VECTOR_TYPE_MAPPED,
    VMA_BLOCK_VECTOR_TYPE_COUNT
};
2759 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2760 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2763 struct VmaAllocation_T
2766 enum ALLOCATION_TYPE
2768 ALLOCATION_TYPE_NONE,
// --- VmaAllocation_T (class interior; extracted view, some original lines missing) ---
// An allocation is either a suballocation inside a VmaDeviceMemoryBlock
// (ALLOCATION_TYPE_BLOCK) or a dedicated VkDeviceMemory (ALLOCATION_TYPE_OWN).
2769 ALLOCATION_TYPE_BLOCK,
2770 ALLOCATION_TYPE_OWN,
// Constructor: starts as ALLOCATION_TYPE_NONE; m_LastUseFrameIndex is atomic.
2773 VmaAllocation_T(uint32_t currentFrameIndex) :
2776 m_pUserData(VMA_NULL),
2777 m_Type(ALLOCATION_TYPE_NONE),
2778 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2779 m_LastUseFrameIndex(currentFrameIndex)
// Initializes this object as a suballocation of `block` at `offset`.
// Must be called on a freshly constructed (TYPE_NONE) object.
2783 void InitBlockAllocation(
2785 VmaDeviceMemoryBlock* block,
2786 VkDeviceSize offset,
2787 VkDeviceSize alignment,
2789 VmaSuballocationType suballocationType,
2793 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2794 VMA_ASSERT(block != VMA_NULL);
2795 m_Type = ALLOCATION_TYPE_BLOCK;
2796 m_Alignment = alignment;
2798 m_pUserData = pUserData;
2799 m_SuballocationType = suballocationType;
2800 m_BlockAllocation.m_hPool = hPool;
2801 m_BlockAllocation.m_Block = block;
2802 m_BlockAllocation.m_Offset = offset;
2803 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initializes this object as an already-lost block allocation (null block,
// can-become-lost). Requires m_LastUseFrameIndex == VMA_FRAME_INDEX_LOST.
2808 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2809 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2810 m_Type = ALLOCATION_TYPE_BLOCK;
2811 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2812 m_BlockAllocation.m_Block = VMA_NULL;
2813 m_BlockAllocation.m_Offset = 0;
2814 m_BlockAllocation.m_CanBecomeLost =
true;
// Re-points an existing block allocation at a new block/offset
// (used during defragmentation moves).
2817 void ChangeBlockAllocation(
2818 VmaDeviceMemoryBlock* block,
2819 VkDeviceSize offset)
2821 VMA_ASSERT(block != VMA_NULL);
2822 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2823 m_BlockAllocation.m_Block = block;
2824 m_BlockAllocation.m_Offset = offset;
// Initializes this object as a dedicated ("own") VkDeviceMemory allocation.
2827 void InitOwnAllocation(
2828 uint32_t memoryTypeIndex,
2829 VkDeviceMemory hMemory,
2830 VmaSuballocationType suballocationType,
2836 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2837 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2838 m_Type = ALLOCATION_TYPE_OWN;
2841 m_pUserData = pUserData;
2842 m_SuballocationType = suballocationType;
2843 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2844 m_OwnAllocation.m_hMemory = hMemory;
2845 m_OwnAllocation.m_PersistentMap = persistentMap;
2846 m_OwnAllocation.m_pMappedData = pMappedData;
// Trivial accessors.
2849 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2850 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2851 VkDeviceSize GetSize()
const {
return m_Size; }
2852 void* GetUserData()
const {
return m_pUserData; }
2853 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2854 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
// Valid only for block allocations.
2856 VmaDeviceMemoryBlock* GetBlock()
const 2858 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2859 return m_BlockAllocation.m_Block;
// Accessors defined out-of-class further below (they dispatch on m_Type).
2861 VkDeviceSize GetOffset()
const;
2862 VkDeviceMemory GetMemory()
const;
2863 uint32_t GetMemoryTypeIndex()
const;
2864 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2865 void* GetMappedData()
const;
2866 bool CanBecomeLost()
const;
2867 VmaPool GetPool()
const;
// (Re)maps a persistently mapped OWN allocation; returns vkMapMemory result.
2869 VkResult OwnAllocMapPersistentlyMappedMemory(VkDevice hDevice)
2871 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2872 if(m_OwnAllocation.m_PersistentMap)
2874 return vkMapMemory(hDevice, m_OwnAllocation.m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_OwnAllocation.m_pMappedData);
// Unmaps a persistently mapped OWN allocation, if currently mapped.
2878 void OwnAllocUnmapPersistentlyMappedMemory(VkDevice hDevice)
2880 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2881 if(m_OwnAllocation.m_pMappedData)
2883 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
2884 vkUnmapMemory(hDevice, m_OwnAllocation.m_hMemory);
2885 m_OwnAllocation.m_pMappedData = VMA_NULL;
// Atomic last-use frame index, used by the lost-allocation mechanism.
2889 uint32_t GetLastUseFrameIndex()
const 2891 return m_LastUseFrameIndex.load();
// Weak CAS: may fail spuriously; callers retry in a loop (see MakeLost below).
2893 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2895 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
// Tries to mark this allocation lost; defined out-of-class below.
2905 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
2909 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
// Data members. m_BlockAllocation / m_OwnAllocation are alternatives selected
// by m_Type — presumably members of a union; original lines missing, confirm.
2921 VkDeviceSize m_Alignment;
2922 VkDeviceSize m_Size;
2924 ALLOCATION_TYPE m_Type;
2925 VmaSuballocationType m_SuballocationType;
2926 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State for ALLOCATION_TYPE_BLOCK.
2929 struct BlockAllocation
2932 VmaDeviceMemoryBlock* m_Block;
2933 VkDeviceSize m_Offset;
2934 bool m_CanBecomeLost;
// State for ALLOCATION_TYPE_OWN.
2938 struct OwnAllocation
2940 uint32_t m_MemoryTypeIndex;
2941 VkDeviceMemory m_hMemory;
2942 bool m_PersistentMap;
2943 void* m_pMappedData;
2949 BlockAllocation m_BlockAllocation;
2951 OwnAllocation m_OwnAllocation;
// Describes one contiguous region inside a memory block: either free
// (hAllocation == VK_NULL_HANDLE, type FREE) or occupied by an allocation.
2959 struct VmaSuballocation
2961 VkDeviceSize offset;
2963 VmaAllocation hAllocation;
2964 VmaSuballocationType type;
2967 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost penalty (in bytes) charged per allocation that would have to be made
// lost to satisfy a request; used to compare candidate placements.
2970 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// A candidate placement for a new allocation inside a block.
2985 struct VmaAllocationRequest
2987 VkDeviceSize offset;
2988 VkDeviceSize sumFreeSize;
2989 VkDeviceSize sumItemSize;
2990 VmaSuballocationList::iterator item;
2991 size_t itemsToMakeLostCount;
// Lower cost is better: bytes of existing allocations sacrificed, plus a
// fixed penalty per allocation made lost.
2993 VkDeviceSize CalcCost()
const 2995 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Represents a single VkDeviceMemory allocation carved into suballocations.
// Free regions of size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are
// additionally indexed, sorted by size, in m_FreeSuballocationsBySize.
3005 class VmaDeviceMemoryBlock
3008 uint32_t m_MemoryTypeIndex;
3009 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3010 VkDeviceMemory m_hMemory;
3011 VkDeviceSize m_Size;
3012 bool m_PersistentMap;
3013 void* m_pMappedData;
3014 uint32_t m_FreeCount;
3015 VkDeviceSize m_SumFreeSize;
3016 VmaSuballocationList m_Suballocations;
// Free suballocations sorted ascending by size, enabling binary search.
3019 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3021 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destroy() must have been called first: the memory handle must be released.
3023 ~VmaDeviceMemoryBlock()
3025 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init takes ownership of newMemory; defined out-of-class below.
3030 uint32_t newMemoryTypeIndex,
3031 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3032 VkDeviceMemory newMemory,
3033 VkDeviceSize newSize,
3037 void Destroy(VmaAllocator allocator);
// Consistency check of all internal invariants; returns false on corruption.
3040 bool Validate()
const;
// Tries to find a place for an allocation of given size/alignment/type.
// With canMakeOtherLost, may propose sacrificing lost-able allocations.
3045 bool CreateAllocationRequest(
3046 uint32_t currentFrameIndex,
3047 uint32_t frameInUseCount,
3048 VkDeviceSize bufferImageGranularity,
3049 VkDeviceSize allocSize,
3050 VkDeviceSize allocAlignment,
3051 VmaSuballocationType allocType,
3052 bool canMakeOtherLost,
3053 VmaAllocationRequest* pAllocationRequest);
3055 bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
3057 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3060 bool IsEmpty()
const;
// Commits a previously computed request, binding hAllocation into the block.
3065 const VmaAllocationRequest& request,
3066 VmaSuballocationType type,
3067 VkDeviceSize allocSize,
3068 VmaAllocation hAllocation);
3071 void Free(
const VmaAllocation allocation);
3073 #if VMA_STATS_STRING_ENABLED 3074 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Core placement check used by CreateAllocationRequest; see out-of-class
// definition below for the algorithm.
3080 bool CheckAllocation(
3081 uint32_t currentFrameIndex,
3082 uint32_t frameInUseCount,
3083 VkDeviceSize bufferImageGranularity,
3084 VkDeviceSize allocSize,
3085 VkDeviceSize allocAlignment,
3086 VmaSuballocationType allocType,
3087 VmaSuballocationList::const_iterator suballocItem,
3088 bool canMakeOtherLost,
3089 VkDeviceSize* pOffset,
3090 size_t* itemsToMakeLostCount,
3091 VkDeviceSize* pSumFreeSize,
3092 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
3095 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3099 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3102 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3105 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3107 bool ValidateFreeSuballocationList()
const;
// Strict-weak ordering of raw pointers (for sorted pointer containers).
3110 struct VmaPointerLess
3112 bool operator()(
const void* lhs,
const void* rhs)
const 3118 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock for one memory type / block-vector type.
// Grows between m_MinBlockCount and m_MaxBlockCount blocks on demand.
3126 struct VmaBlockVector
3129 VmaAllocator hAllocator,
3130 uint32_t memoryTypeIndex,
3131 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3132 VkDeviceSize preferredBlockSize,
3133 size_t minBlockCount,
3134 size_t maxBlockCount,
3135 VkDeviceSize bufferImageGranularity,
3136 uint32_t frameInUseCount,
3140 VkResult CreateMinBlocks();
// Trivial accessors over the immutable configuration.
3142 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3143 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3144 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3145 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3146 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3150 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate/Free across the blocks of this vector.
3153 VmaPool hCurrentPool,
3154 uint32_t currentFrameIndex,
3155 const VkMemoryRequirements& vkMemReq,
3157 VmaSuballocationType suballocType,
3158 VmaAllocation* pAllocation);
3161 VmaAllocation hAllocation);
3166 #if VMA_STATS_STRING_ENABLED 3167 void PrintDetailedMap(
class VmaJsonWriter& json);
3170 void UnmapPersistentlyMappedMemory();
3171 VkResult MapPersistentlyMappedMemory();
3173 void MakePoolAllocationsLost(
3174 uint32_t currentFrameIndex,
3175 size_t* pLostAllocationCount);
// Defragmentation support (lazily created defragmentator).
3177 VmaDefragmentator* EnsureDefragmentator(
3179 const VkAllocationCallbacks* pAllocationCallbacks,
3180 uint32_t currentFrameIndex);
3182 VkResult Defragment(
3184 VkDeviceSize& maxBytesToMove,
3185 uint32_t& maxAllocationsToMove);
3187 void DestroyDefragmentator();
3190 friend class VmaDefragmentator;
// Immutable configuration captured at construction time.
3192 const VmaAllocator m_hAllocator;
3193 const uint32_t m_MemoryTypeIndex;
3194 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3195 const VkDeviceSize m_PreferredBlockSize;
3196 const size_t m_MinBlockCount;
3197 const size_t m_MaxBlockCount;
3198 const VkDeviceSize m_BufferImageGranularity;
3199 const uint32_t m_FrameInUseCount;
3200 const bool m_IsCustomPool;
3203 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while an empty block is kept alive — NOTE(review): exact policy not
// visible in this extracted view; confirm in full source.
3207 bool m_HasEmptyBlock;
3208 VmaDefragmentator* m_pDefragmentator;
3211 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted, incrementally.
3215 void IncrementallySortBlocks();
3217 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// --- VmaPool_T fragment: a custom pool wraps a dedicated block vector ---
3223 VmaBlockVector m_BlockVector;
3227 VmaAllocator hAllocator,
3231 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to reduce
// fragmentation, within given byte/count budgets.
3233 #if VMA_STATS_STRING_ENABLED 3238 class VmaDefragmentator
3240 const VkDevice m_hDevice;
3241 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3242 VmaBlockVector*
const m_pBlockVector;
3243 uint32_t m_CurrentFrameIndex;
3244 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Running totals for budget enforcement / reporting.
3245 VkDeviceSize m_BytesMoved;
3246 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged (optional)
// reports back to the caller whether the allocation was moved.
3248 struct AllocationInfo
3250 VmaAllocation m_hAllocation;
3251 VkBool32* m_pChanged;
3254 m_hAllocation(VK_NULL_HANDLE),
3255 m_pChanged(VMA_NULL)
// Orders AllocationInfo by allocation size, largest first.
3260 struct AllocationInfoSizeGreater
3262 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3264 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3269 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block bookkeeping used during a defragmentation pass (struct BlockInfo;
// its opening line is missing from this extracted view).
3273 VmaDeviceMemoryBlock* m_pBlock;
3274 bool m_HasNonMovableAllocations;
3275 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// m_HasNonMovableAllocations starts true (pessimistic) until
// CalcHasNonMovableAllocations() refines it.
3277 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3279 m_HasNonMovableAllocations(true),
3280 m_Allocations(pAllocationCallbacks),
3281 m_pMappedDataForDefragmentation(VMA_NULL)
3285 void CalcHasNonMovableAllocations()
3287 const size_t blockAllocCount =
3288 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3289 const size_t defragmentAllocCount = m_Allocations.size();
3290 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3293 void SortAllocationsBySizeDescecnding()
3295 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Returns a CPU pointer to the block's memory, mapping it on demand.
// Reuses the block's persistent mapping when available; otherwise maps here
// and remembers the pointer so Unmap() can release it. (Return statements
// are on lines missing from this extracted view.)
3298 VkResult EnsureMapping(VkDevice hDevice,
void** ppMappedData)
3301 if(m_pMappedDataForDefragmentation)
3303 *ppMappedData = m_pMappedDataForDefragmentation;
3308 if(m_pBlock->m_PersistentMap)
3310 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
3311 *ppMappedData = m_pBlock->m_pMappedData;
3316 VkResult res = vkMapMemory(hDevice, m_pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_pMappedDataForDefragmentation);
3317 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmaps only if this object mapped the memory itself (never the block's
// persistent mapping).
3321 void Unmap(VkDevice hDevice)
3323 if(m_pMappedDataForDefragmentation != VMA_NULL)
3325 vkUnmapMemory(hDevice, m_pBlock->m_hMemory);
3331 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators so BlockInfo* collections can be searched either
// by raw block pointer or by BlockInfo*.
3334 struct BlockPointerLess
3336 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3338 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3340 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3342 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ordering of candidate move destinations: compares the non-movable flag
// first, then free size (the return lines are missing from this view).
3348 struct BlockInfoCompareMoveDestination
3350 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3352 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3356 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3360 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3368 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3369 BlockInfoVector m_Blocks;
// One pass of defragmentation, bounded by the given budgets.
3371 VkResult DefragmentRound(
3372 VkDeviceSize maxBytesToMove,
3373 uint32_t maxAllocationsToMove);
// Heuristic filter for candidate moves.
3375 static bool MoveMakesSense(
3376 size_t dstBlockIndex, VkDeviceSize dstOffset,
3377 size_t srcBlockIndex, VkDeviceSize srcOffset);
3382 const VkAllocationCallbacks* pAllocationCallbacks,
3383 VmaBlockVector* pBlockVector,
3384 uint32_t currentFrameIndex);
3386 ~VmaDefragmentator();
3388 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3389 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
// Registers an allocation to be moved; pChanged may be VMA_NULL.
3391 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3393 VkResult Defragment(
3394 VkDeviceSize maxBytesToMove,
3395 uint32_t maxAllocationsToMove);
// Main allocator object behind the VmaAllocator handle. Owns per-memory-type
// block vectors, dedicated ("own") allocation lists, and custom pools.
3399 struct VmaAllocator_T
3403 bool m_AllocationCallbacksSpecified;
3404 VkAllocationCallbacks m_AllocationCallbacks;
// Counter related to Unmap/MapPersistentlyMappedMemory — NOTE(review):
// increment/decrement sites are not visible here; confirm in full source.
3408 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
// Optional per-heap size limits, guarded by m_HeapSizeLimitMutex.
3411 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3412 VMA_MUTEX m_HeapSizeLimitMutex;
3414 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3415 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors, indexed [memory type][block-vector type].
3418 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
// Dedicated allocations, same indexing; one mutex per memory type.
3421 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3422 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3423 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks, or null meaning the Vulkan default allocator.
3428 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3430 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
// Device limit, clamped up by the debug minimum.
3433 VkDeviceSize GetBufferImageGranularity()
const 3436 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3437 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3440 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3441 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3443 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3445 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3446 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Main entry points used by the public vma* functions.
3450 VkResult AllocateMemory(
3451 const VkMemoryRequirements& vkMemReq,
3453 VmaSuballocationType suballocType,
3454 VmaAllocation* pAllocation);
3457 void FreeMemory(
const VmaAllocation allocation);
3459 void CalculateStats(
VmaStats* pStats);
3461 #if VMA_STATS_STRING_ENABLED 3462 void PrintDetailedMap(
class VmaJsonWriter& json);
3465 void UnmapPersistentlyMappedMemory();
3466 VkResult MapPersistentlyMappedMemory();
3468 VkResult Defragment(
3469 VmaAllocation* pAllocations,
3470 size_t allocationCount,
3471 VkBool32* pAllocationsChanged,
3475 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3478 void DestroyPool(VmaPool pool);
3479 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3481 void SetCurrentFrameIndex(uint32_t frameIndex);
3483 void MakePoolAllocationsLost(
3485 size_t* pLostAllocationCount);
3487 void CreateLostAllocation(VmaAllocation* pAllocation);
// Wrappers over vkAllocateMemory/vkFreeMemory (see FreeVulkanMemory use in
// VmaDeviceMemoryBlock::Destroy below).
3489 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3490 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3493 VkDeviceSize m_PreferredLargeHeapBlockSize;
3494 VkDeviceSize m_PreferredSmallHeapBlockSize;
3496 VkPhysicalDevice m_PhysicalDevice;
3497 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
3499 VMA_MUTEX m_PoolsMutex;
3501 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3503 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation helpers for a single, already-chosen memory type.
3505 VkResult AllocateMemoryOfType(
3506 const VkMemoryRequirements& vkMemReq,
3508 uint32_t memTypeIndex,
3509 VmaSuballocationType suballocType,
3510 VmaAllocation* pAllocation);
3513 VkResult AllocateOwnMemory(
3515 VmaSuballocationType suballocType,
3516 uint32_t memTypeIndex,
3519 VmaAllocation* pAllocation);
3522 void FreeOwnMemory(VmaAllocation allocation);
3528 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3530 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3533 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3535 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3538 template<
typename T>
3539 static T* VmaAllocate(VmaAllocator hAllocator)
3541 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3544 template<
typename T>
3545 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3547 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys and frees a single object / an array allocated with the helpers
// above. NOTE(review): the destructor-invocation lines are missing from this
// extracted view; only the final VmaFree calls are visible.
3550 template<
typename T>
3551 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3556 VmaFree(hAllocator, ptr);
3560 template<
typename T>
3561 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
// Iterates indices count-1 down to 0 (reverse order) before freeing.
3565 for(
size_t i = count; i--; )
3567 VmaFree(hAllocator, ptr);
3579 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3580 size_t GetLength()
const {
return m_Data.size(); }
3581 const char* GetData()
const {
return m_Data.data(); }
3583 void Add(
char ch) { m_Data.push_back(ch); }
3584 void Add(
const char* pStr);
3585 void AddNewLine() { Add(
'\n'); }
3586 void AddNumber(uint32_t num);
3587 void AddNumber(uint64_t num);
3588 void AddPointer(
const void* ptr);
3591 VmaVector< char, VmaStlAllocator<char> > m_Data;
3594 void VmaStringBuilder::Add(
const char* pStr)
3596 const size_t strLen = strlen(pStr);
3599 const size_t oldCount = m_Data.size();
3600 m_Data.resize(oldCount + strLen);
3601 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting helpers. NOTE(review): the local `buf`
// declarations and the final Add(buf) calls are on lines missing from this
// extracted view; only the conversion calls are visible.
3605 void VmaStringBuilder::AddNumber(uint32_t num)
3608 VmaUint32ToStr(buf,
sizeof(buf), num);
3612 void VmaStringBuilder::AddNumber(uint64_t num)
3615 VmaUint64ToStr(buf,
sizeof(buf), num);
3619 void VmaStringBuilder::AddPointer(
const void* ptr)
3622 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer used to produce the detailed stats string.
// Maintains a stack of open objects/arrays; inside an object, values at even
// positions are keys and must be strings (enforced in BeginValue).
3626 #endif // #if VMA_STATS_STRING_ENABLED 3631 #if VMA_STATS_STRING_ENABLED 3636 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3639 void BeginObject(
bool singleLine =
false);
3642 void BeginArray(
bool singleLine =
false);
// String emission: either one-shot WriteString, or
// BeginString / ContinueString... / EndString for pieced-together values.
3645 void WriteString(
const char* pStr);
3646 void BeginString(
const char* pStr = VMA_NULL);
3647 void ContinueString(
const char* pStr);
3648 void ContinueString(uint32_t n);
3649 void ContinueString(uint64_t n);
3650 void EndString(
const char* pStr = VMA_NULL);
3652 void WriteNumber(uint32_t n);
3653 void WriteNumber(uint64_t n);
3654 void WriteBool(
bool b);
3658 static const char*
const INDENT;
3660 enum COLLECTION_TYPE
3662 COLLECTION_TYPE_OBJECT,
3663 COLLECTION_TYPE_ARRAY,
// One StackItem per currently-open object/array.
3667 COLLECTION_TYPE type;
3668 uint32_t valueCount;
3669 bool singleLineMode;
3672 VmaStringBuilder& m_SB;
3673 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3674 bool m_InsideString;
// Shared pre-value bookkeeping (separators, indent, key/value alternation).
3676 void BeginValue(
bool isString);
3677 void WriteIndent(
bool oneLess =
false);
// Indent unit appended once per nesting level by WriteIndent.
3680 const char*
const VmaJsonWriter::INDENT =
"  ";
3682 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3684 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3685 m_InsideString(false)
// Destructor verifies balanced Begin/End usage.
3689 VmaJsonWriter::~VmaJsonWriter()
3691 VMA_ASSERT(!m_InsideString);
3692 VMA_ASSERT(m_Stack.empty());
// Opens an object and pushes a frame onto the stack.
3695 void VmaJsonWriter::BeginObject(
bool singleLine)
3697 VMA_ASSERT(!m_InsideString);
3703 item.type = COLLECTION_TYPE_OBJECT;
3704 item.valueCount = 0;
3705 item.singleLineMode = singleLine;
3706 m_Stack.push_back(item);
// Closes the innermost collection; must match a BeginObject.
3709 void VmaJsonWriter::EndObject()
3711 VMA_ASSERT(!m_InsideString);
3716 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens an array and pushes a frame onto the stack.
3720 void VmaJsonWriter::BeginArray(
bool singleLine)
3722 VMA_ASSERT(!m_InsideString);
3728 item.type = COLLECTION_TYPE_ARRAY;
3729 item.valueCount = 0;
3730 item.singleLineMode = singleLine;
3731 m_Stack.push_back(item);
// Closes the innermost collection; must match a BeginArray.
3734 void VmaJsonWriter::EndArray()
3736 VMA_ASSERT(!m_InsideString);
3741 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: Begin + End in one call (body lines missing from this view).
3745 void VmaJsonWriter::WriteString(
const char* pStr)
3751 void VmaJsonWriter::BeginString(
const char* pStr)
3753 VMA_ASSERT(!m_InsideString);
3757 m_InsideString =
true;
3758 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3760 ContinueString(pStr);
// Appends characters to the currently open string; the per-character
// escaping switch is on lines missing from this extracted view.
3764 void VmaJsonWriter::ContinueString(
const char* pStr)
3766 VMA_ASSERT(m_InsideString);
3768 const size_t strLen = strlen(pStr);
3769 for(
size_t i = 0; i < strLen; ++i)
3796 VMA_ASSERT(0 &&
"Character not currently supported.");
3802 void VmaJsonWriter::ContinueString(uint32_t n)
3804 VMA_ASSERT(m_InsideString);
3808 void VmaJsonWriter::ContinueString(uint64_t n)
3810 VMA_ASSERT(m_InsideString);
// Closes the current string, optionally appending pStr first.
3814 void VmaJsonWriter::EndString(
const char* pStr)
3816 VMA_ASSERT(m_InsideString);
3817 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3819 ContinueString(pStr);
3822 m_InsideString =
false;
3825 void VmaJsonWriter::WriteNumber(uint32_t n)
3827 VMA_ASSERT(!m_InsideString);
3832 void VmaJsonWriter::WriteNumber(uint64_t n)
3834 VMA_ASSERT(!m_InsideString);
// Emits the JSON literals "true"/"false".
3839 void VmaJsonWriter::WriteBool(
bool b)
3841 VMA_ASSERT(!m_InsideString);
3843 m_SB.Add(b ?
"true" :
"false");
3846 void VmaJsonWriter::WriteNull()
3848 VMA_ASSERT(!m_InsideString);
// Emits separators/indentation before a value and enforces that object
// entries alternate key (string) / value.
3853 void VmaJsonWriter::BeginValue(
bool isString)
3855 if(!m_Stack.empty())
3857 StackItem& currItem = m_Stack.back();
3858 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3859 currItem.valueCount % 2 == 0)
// Even position inside an object: this must be a key, hence a string.
3861 VMA_ASSERT(isString);
3864 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3865 currItem.valueCount % 2 != 0)
3869 else if(currItem.valueCount > 0)
3878 ++currItem.valueCount;
// Indents to the current stack depth (oneLess drops one level, used when
// closing a collection); skipped in single-line mode.
3882 void VmaJsonWriter::WriteIndent(
bool oneLess)
3884 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3888 size_t count = m_Stack.size();
3889 if(count > 0 && oneLess)
3893 for(
size_t i = 0; i < count; ++i)
// --- VmaAllocation_T out-of-class definitions: dispatch on m_Type ---
// (the switch(m_Type) headers are on lines missing from this extracted view)
3900 #endif // #if VMA_STATS_STRING_ENABLED 3904 VkDeviceSize VmaAllocation_T::GetOffset()
const 3908 case ALLOCATION_TYPE_BLOCK:
3909 return m_BlockAllocation.m_Offset;
3910 case ALLOCATION_TYPE_OWN:
3918 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3922 case ALLOCATION_TYPE_BLOCK:
3923 return m_BlockAllocation.m_Block->m_hMemory;
3924 case ALLOCATION_TYPE_OWN:
3925 return m_OwnAllocation.m_hMemory;
3928 return VK_NULL_HANDLE;
3932 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3936 case ALLOCATION_TYPE_BLOCK:
3937 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3938 case ALLOCATION_TYPE_OWN:
3939 return m_OwnAllocation.m_MemoryTypeIndex;
// OWN allocations derive their vector type from the persistent-map flag.
3946 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3950 case ALLOCATION_TYPE_BLOCK:
3951 return m_BlockAllocation.m_Block->m_BlockVectorType;
3952 case ALLOCATION_TYPE_OWN:
3953 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3956 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// Block allocations offset into the block's mapping; OWN allocations return
// their own mapped pointer.
3960 void* VmaAllocation_T::GetMappedData()
const 3964 case ALLOCATION_TYPE_BLOCK:
3965 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3967 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
3974 case ALLOCATION_TYPE_OWN:
3975 return m_OwnAllocation.m_pMappedData;
3982 bool VmaAllocation_T::CanBecomeLost()
const 3986 case ALLOCATION_TYPE_BLOCK:
3987 return m_BlockAllocation.m_CanBecomeLost;
3988 case ALLOCATION_TYPE_OWN:
// Only block allocations belong to a pool.
3996 VmaPool VmaAllocation_T::GetPool()
const 3998 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3999 return m_BlockAllocation.m_hPool;
// CAS-based attempt to mark the allocation lost, unless it is already lost
// or was used within the last frameInUseCount frames.
// compare_exchange_weak may fail spuriously — the surrounding retry-loop
// header is on lines missing from this extracted view.
4002 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4004 VMA_ASSERT(CanBecomeLost());
4010 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4013 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still considered in use: used too recently relative to currentFrameIndex.
4018 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4024 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Emits one VmaStatInfo as a JSON object. NOTE(review): the value-writing
// lines are missing from this extracted view; the keys below show the schema.
4034 #if VMA_STATS_STRING_ENABLED 4037 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4046 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4050 json.WriteString(
"Blocks");
4053 json.WriteString(
"Allocations");
4056 json.WriteString(
"UnusedRanges");
4059 json.WriteString(
"UsedBytes");
4062 json.WriteString(
"UnusedBytes");
// Min/Avg/Max triples are emitted as single-line nested objects.
4067 json.WriteString(
"AllocationSize");
4068 json.BeginObject(
true);
4069 json.WriteString(
"Min");
4071 json.WriteString(
"Avg");
4073 json.WriteString(
"Max");
4080 json.WriteString(
"UnusedRangeSize");
4081 json.BeginObject(
true);
4082 json.WriteString(
"Min");
4084 json.WriteString(
"Avg");
4086 json.WriteString(
"Max");
// Heterogeneous comparator: orders free-suballocation iterators by size and
// supports lookup by a bare VkDeviceSize (used in binary search below).
4094 #endif // #if VMA_STATS_STRING_ENABLED 4096 struct VmaSuballocationItemSizeLess
4099 const VmaSuballocationList::iterator lhs,
4100 const VmaSuballocationList::iterator rhs)
const 4102 return lhs->size < rhs->size;
4105 const VmaSuballocationList::iterator lhs,
4106 VkDeviceSize rhsSize)
const 4108 return lhs->size < rhsSize;
// Constructs an uninitialized block; real setup happens in Init().
4112 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
4113 m_MemoryTypeIndex(UINT32_MAX),
4114 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
4115 m_hMemory(VK_NULL_HANDLE),
4117 m_PersistentMap(false),
4118 m_pMappedData(VMA_NULL),
4121 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4122 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Takes ownership of newMemory and sets up a single free suballocation
// covering the whole block.
4126 void VmaDeviceMemoryBlock::Init(
4127 uint32_t newMemoryTypeIndex,
4128 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
4129 VkDeviceMemory newMemory,
4130 VkDeviceSize newSize,
4134 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4136 m_MemoryTypeIndex = newMemoryTypeIndex;
4137 m_BlockVectorType = newBlockVectorType;
4138 m_hMemory = newMemory;
4140 m_PersistentMap = persistentMap;
4141 m_pMappedData = pMappedData;
// The whole block starts as one free range.
4143 m_SumFreeSize = newSize;
4145 m_Suballocations.clear();
4146 m_FreeSuballocationsBySize.clear();
4148 VmaSuballocation suballoc = {};
4149 suballoc.offset = 0;
4150 suballoc.size = newSize;
4151 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4152 suballoc.hAllocation = VK_NULL_HANDLE;
4154 m_Suballocations.push_back(suballoc);
4155 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
// NOTE(review): the step from end() back to the pushed element is on a line
// missing from this extracted view.
4157 m_FreeSuballocationsBySize.push_back(suballocItem);
// Unmaps (if mapped), returns the VkDeviceMemory to the allocator, and
// clears the handle so the destructor's assertion holds.
4160 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
4164 VMA_ASSERT(IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
4166 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
4167 if(m_pMappedData != VMA_NULL)
4169 vkUnmapMemory(allocator->m_hDevice, m_hMemory);
4170 m_pMappedData = VMA_NULL;
4173 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
4174 m_hMemory = VK_NULL_HANDLE;
// Full invariant check: offsets contiguous, no two adjacent free ranges,
// cached free counters/sums match, and the by-size index is consistent.
4177 bool VmaDeviceMemoryBlock::Validate()
const 4179 if((m_hMemory == VK_NULL_HANDLE) ||
4181 m_Suballocations.empty())
// Totals recomputed from scratch and compared to cached members at the end.
4187 VkDeviceSize calculatedOffset = 0;
4189 uint32_t calculatedFreeCount = 0;
4191 VkDeviceSize calculatedSumFreeSize = 0;
4194 size_t freeSuballocationsToRegister = 0;
// Two adjacent free suballocations should have been merged — an error.
4196 bool prevFree =
false;
4198 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4199 suballocItem != m_Suballocations.cend();
4202 const VmaSuballocation& subAlloc = *suballocItem;
4205 if(subAlloc.offset != calculatedOffset)
4210 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4212 if(prevFree && currFree)
4216 prevFree = currFree;
// A suballocation is free iff it has no allocation handle.
4218 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4225 calculatedSumFreeSize += subAlloc.size;
4226 ++calculatedFreeCount;
4227 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4229 ++freeSuballocationsToRegister;
4233 calculatedOffset += subAlloc.size;
4238 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must contain only free items, sorted ascending.
4243 VkDeviceSize lastSize = 0;
4244 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4246 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4249 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4254 if(suballocItem->size < lastSize)
4259 lastSize = suballocItem->size;
4264 (calculatedOffset == m_Size) &&
4265 (calculatedSumFreeSize == m_SumFreeSize) &&
4266 (calculatedFreeCount == m_FreeCount);
// Searches this block for a placement satisfying size/alignment/type.
// Without canMakeOtherLost: searches the by-size index — either a binary
// search for the first fitting free range, or a reverse scan from the
// largest (strategy-selection lines are missing from this extracted view).
// With canMakeOtherLost: brute-force over all suballocations, choosing the
// candidate with the lowest CalcCost().
4279 bool VmaDeviceMemoryBlock::CreateAllocationRequest(
4280 uint32_t currentFrameIndex,
4281 uint32_t frameInUseCount,
4282 VkDeviceSize bufferImageGranularity,
4283 VkDeviceSize allocSize,
4284 VkDeviceSize allocAlignment,
4285 VmaSuballocationType allocType,
4286 bool canMakeOtherLost,
4287 VmaAllocationRequest* pAllocationRequest)
4289 VMA_ASSERT(allocSize > 0);
4290 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4291 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4292 VMA_HEAVY_ASSERT(Validate());
// Early out: cannot fit in the total free space unless others can be lost.
4295 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4301 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4302 if(freeSuballocCount > 0)
// Best fit: first free range with size >= allocSize, then scan upward.
4307 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4308 m_FreeSuballocationsBySize.data(),
4309 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4311 VmaSuballocationItemSizeLess());
4312 size_t index = it - m_FreeSuballocationsBySize.data();
4313 for(; index < freeSuballocCount; ++index)
4318 bufferImageGranularity,
4322 m_FreeSuballocationsBySize[index],
4324 &pAllocationRequest->offset,
4325 &pAllocationRequest->itemsToMakeLostCount,
4326 &pAllocationRequest->sumFreeSize,
4327 &pAllocationRequest->sumItemSize))
4329 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative path: scan free ranges from largest to smallest.
4337 for(
size_t index = freeSuballocCount; index--; )
4342 bufferImageGranularity,
4346 m_FreeSuballocationsBySize[index],
4348 &pAllocationRequest->offset,
4349 &pAllocationRequest->itemsToMakeLostCount,
4350 &pAllocationRequest->sumFreeSize,
4351 &pAllocationRequest->sumItemSize))
4353 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
4360 if(canMakeOtherLost)
// Brute-force search minimizing the cost of making other allocations lost.
// VK_WHOLE_SIZE acts as "no candidate found yet" sentinel.
4364 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4365 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4367 VmaAllocationRequest tmpAllocRequest = {};
4368 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4369 suballocIt != m_Suballocations.end();
4372 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4373 suballocIt->hAllocation->CanBecomeLost())
4378 bufferImageGranularity,
4384 &tmpAllocRequest.offset,
4385 &tmpAllocRequest.itemsToMakeLostCount,
4386 &tmpAllocRequest.sumFreeSize,
4387 &tmpAllocRequest.sumItemSize))
4389 tmpAllocRequest.item = suballocIt;
4391 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4393 *pAllocationRequest = tmpAllocRequest;
// Found at least one candidate iff the sentinel was overwritten.
4399 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Actually marks lost the allocations counted in the request, freeing their
// suballocations, until the required count is reached.
4408 bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
4410 while(pAllocationRequest->itemsToMakeLostCount > 0)
4412 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4414 ++pAllocationRequest->item;
4416 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4417 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4418 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4419 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the resulting item.
4421 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4422 --pAllocationRequest->itemsToMakeLostCount;
4430 VMA_HEAVY_ASSERT(Validate());
4431 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4432 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks lost every allocation in this block that is allowed to be lost and
// old enough; returns how many were lost.
4437 uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4439 uint32_t lostAllocationCount = 0;
4440 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4441 it != m_Suballocations.end();
4444 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4445 it->hAllocation->CanBecomeLost() &&
4446 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4448 it = FreeSuballocation(it);
4449 ++lostAllocationCount;
4452 return lostAllocationCount;
4455 bool VmaDeviceMemoryBlock::CheckAllocation(
4456 uint32_t currentFrameIndex,
4457 uint32_t frameInUseCount,
4458 VkDeviceSize bufferImageGranularity,
4459 VkDeviceSize allocSize,
4460 VkDeviceSize allocAlignment,
4461 VmaSuballocationType allocType,
4462 VmaSuballocationList::const_iterator suballocItem,
4463 bool canMakeOtherLost,
4464 VkDeviceSize* pOffset,
4465 size_t* itemsToMakeLostCount,
4466 VkDeviceSize* pSumFreeSize,
4467 VkDeviceSize* pSumItemSize)
const 4469 VMA_ASSERT(allocSize > 0);
4470 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4471 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4472 VMA_ASSERT(pOffset != VMA_NULL);
4474 *itemsToMakeLostCount = 0;
4478 if(canMakeOtherLost)
4480 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4482 *pSumFreeSize = suballocItem->size;
4486 if(suballocItem->hAllocation->CanBecomeLost() &&
4487 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4489 ++*itemsToMakeLostCount;
4490 *pSumItemSize = suballocItem->size;
4499 if(m_Size - suballocItem->offset < allocSize)
4505 *pOffset = suballocItem->offset;
4508 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4510 *pOffset += VMA_DEBUG_MARGIN;
4514 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4515 *pOffset = VmaAlignUp(*pOffset, alignment);
4519 if(bufferImageGranularity > 1)
4521 bool bufferImageGranularityConflict =
false;
4522 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4523 while(prevSuballocItem != m_Suballocations.cbegin())
4526 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4527 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4529 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4531 bufferImageGranularityConflict =
true;
4539 if(bufferImageGranularityConflict)
4541 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4547 if(*pOffset >= suballocItem->offset + suballocItem->size)
4553 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4556 VmaSuballocationList::const_iterator next = suballocItem;
4558 const VkDeviceSize requiredEndMargin =
4559 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4561 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4563 if(suballocItem->offset + totalSize > m_Size)
4570 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4571 if(totalSize > suballocItem->size)
4573 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4574 while(remainingSize > 0)
4577 if(lastSuballocItem == m_Suballocations.cend())
4581 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4583 *pSumFreeSize += lastSuballocItem->size;
4587 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4588 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4589 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4591 ++*itemsToMakeLostCount;
4592 *pSumItemSize += lastSuballocItem->size;
4599 remainingSize = (lastSuballocItem->size < remainingSize) ?
4600 remainingSize - lastSuballocItem->size : 0;
4606 if(bufferImageGranularity > 1)
4608 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4610 while(nextSuballocItem != m_Suballocations.cend())
4612 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4613 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4615 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4617 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4618 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4619 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4621 ++*itemsToMakeLostCount;
4640 const VmaSuballocation& suballoc = *suballocItem;
4641 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4643 *pSumFreeSize = suballoc.size;
4646 if(suballoc.size < allocSize)
4652 *pOffset = suballoc.offset;
4655 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4657 *pOffset += VMA_DEBUG_MARGIN;
4661 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4662 *pOffset = VmaAlignUp(*pOffset, alignment);
4666 if(bufferImageGranularity > 1)
4668 bool bufferImageGranularityConflict =
false;
4669 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4670 while(prevSuballocItem != m_Suballocations.cbegin())
4673 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4674 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4676 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4678 bufferImageGranularityConflict =
true;
4686 if(bufferImageGranularityConflict)
4688 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4693 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4696 VmaSuballocationList::const_iterator next = suballocItem;
4698 const VkDeviceSize requiredEndMargin =
4699 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4702 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
4709 if(bufferImageGranularity > 1)
4711 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4713 while(nextSuballocItem != m_Suballocations.cend())
4715 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4716 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4718 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4737 bool VmaDeviceMemoryBlock::IsEmpty()
const 4739 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4742 void VmaDeviceMemoryBlock::Alloc(
4743 const VmaAllocationRequest& request,
4744 VmaSuballocationType type,
4745 VkDeviceSize allocSize,
4746 VmaAllocation hAllocation)
4748 VMA_ASSERT(request.item != m_Suballocations.end());
4749 VmaSuballocation& suballoc = *request.item;
4751 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4753 VMA_ASSERT(request.offset >= suballoc.offset);
4754 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4755 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4756 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
4760 UnregisterFreeSuballocation(request.item);
4762 suballoc.offset = request.offset;
4763 suballoc.size = allocSize;
4764 suballoc.type = type;
4765 suballoc.hAllocation = hAllocation;
4770 VmaSuballocation paddingSuballoc = {};
4771 paddingSuballoc.offset = request.offset + allocSize;
4772 paddingSuballoc.size = paddingEnd;
4773 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4774 VmaSuballocationList::iterator next = request.item;
4776 const VmaSuballocationList::iterator paddingEndItem =
4777 m_Suballocations.insert(next, paddingSuballoc);
4778 RegisterFreeSuballocation(paddingEndItem);
4784 VmaSuballocation paddingSuballoc = {};
4785 paddingSuballoc.offset = request.offset - paddingBegin;
4786 paddingSuballoc.size = paddingBegin;
4787 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4788 const VmaSuballocationList::iterator paddingBeginItem =
4789 m_Suballocations.insert(request.item, paddingSuballoc);
4790 RegisterFreeSuballocation(paddingBeginItem);
4794 m_FreeCount = m_FreeCount - 1;
4795 if(paddingBegin > 0)
4803 m_SumFreeSize -= allocSize;
4806 VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
4809 VmaSuballocation& suballoc = *suballocItem;
4810 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4811 suballoc.hAllocation = VK_NULL_HANDLE;
4815 m_SumFreeSize += suballoc.size;
4818 bool mergeWithNext =
false;
4819 bool mergeWithPrev =
false;
4821 VmaSuballocationList::iterator nextItem = suballocItem;
4823 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
4825 mergeWithNext =
true;
4828 VmaSuballocationList::iterator prevItem = suballocItem;
4829 if(suballocItem != m_Suballocations.begin())
4832 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4834 mergeWithPrev =
true;
4840 UnregisterFreeSuballocation(nextItem);
4841 MergeFreeWithNext(suballocItem);
4846 UnregisterFreeSuballocation(prevItem);
4847 MergeFreeWithNext(prevItem);
4848 RegisterFreeSuballocation(prevItem);
4853 RegisterFreeSuballocation(suballocItem);
4854 return suballocItem;
4858 void VmaDeviceMemoryBlock::Free(
const VmaAllocation allocation)
4860 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4861 suballocItem != m_Suballocations.end();
4864 VmaSuballocation& suballoc = *suballocItem;
4865 if(suballoc.hAllocation == allocation)
4867 FreeSuballocation(suballocItem);
4868 VMA_HEAVY_ASSERT(Validate());
4872 VMA_ASSERT(0 &&
"Not found!");
4875 #if VMA_STATS_STRING_ENABLED 4877 void VmaDeviceMemoryBlock::PrintDetailedMap(
class VmaJsonWriter& json)
const 4881 json.WriteString(
"TotalBytes");
4882 json.WriteNumber(m_Size);
4884 json.WriteString(
"UnusedBytes");
4885 json.WriteNumber(m_SumFreeSize);
4887 json.WriteString(
"Allocations");
4888 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4890 json.WriteString(
"UnusedRanges");
4891 json.WriteNumber(m_FreeCount);
4893 json.WriteString(
"Suballocations");
4896 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4897 suballocItem != m_Suballocations.cend();
4898 ++suballocItem, ++i)
4900 json.BeginObject(
true);
4902 json.WriteString(
"Type");
4903 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4905 json.WriteString(
"Size");
4906 json.WriteNumber(suballocItem->size);
4908 json.WriteString(
"Offset");
4909 json.WriteNumber(suballocItem->offset);
4918 #endif // #if VMA_STATS_STRING_ENABLED 4920 void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
4922 VMA_ASSERT(item != m_Suballocations.end());
4923 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4925 VmaSuballocationList::iterator nextItem = item;
4927 VMA_ASSERT(nextItem != m_Suballocations.end());
4928 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
4930 item->size += nextItem->size;
4932 m_Suballocations.erase(nextItem);
4935 void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
4937 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4938 VMA_ASSERT(item->size > 0);
4942 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4944 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4946 if(m_FreeSuballocationsBySize.empty())
4948 m_FreeSuballocationsBySize.push_back(item);
4952 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
4960 void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
4962 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4963 VMA_ASSERT(item->size > 0);
4967 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4969 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4971 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4972 m_FreeSuballocationsBySize.data(),
4973 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
4975 VmaSuballocationItemSizeLess());
4976 for(
size_t index = it - m_FreeSuballocationsBySize.data();
4977 index < m_FreeSuballocationsBySize.size();
4980 if(m_FreeSuballocationsBySize[index] == item)
4982 VmaVectorRemove(m_FreeSuballocationsBySize, index);
4985 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
4987 VMA_ASSERT(0 &&
"Not found.");
4993 bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList()
const 4995 VkDeviceSize lastSize = 0;
4996 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4998 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5000 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5005 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5010 if(it->size < lastSize)
5016 lastSize = it->size;
5023 memset(&outInfo, 0,
sizeof(outInfo));
5028 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaDeviceMemoryBlock& block)
5032 const uint32_t rangeCount = (uint32_t)block.m_Suballocations.size();
5044 for(VmaSuballocationList::const_iterator suballocItem = block.m_Suballocations.cbegin();
5045 suballocItem != block.m_Suballocations.cend();
5048 const VmaSuballocation& suballoc = *suballocItem;
5049 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
5076 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
5084 VmaPool_T::VmaPool_T(
5085 VmaAllocator hAllocator,
5089 createInfo.memoryTypeIndex,
5091 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5092 createInfo.blockSize,
5093 createInfo.minBlockCount,
5094 createInfo.maxBlockCount,
5096 createInfo.frameInUseCount,
5101 VmaPool_T::~VmaPool_T()
5105 #if VMA_STATS_STRING_ENABLED 5107 #endif // #if VMA_STATS_STRING_ENABLED 5109 VmaBlockVector::VmaBlockVector(
5110 VmaAllocator hAllocator,
5111 uint32_t memoryTypeIndex,
5112 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5113 VkDeviceSize preferredBlockSize,
5114 size_t minBlockCount,
5115 size_t maxBlockCount,
5116 VkDeviceSize bufferImageGranularity,
5117 uint32_t frameInUseCount,
5118 bool isCustomPool) :
5119 m_hAllocator(hAllocator),
5120 m_MemoryTypeIndex(memoryTypeIndex),
5121 m_BlockVectorType(blockVectorType),
5122 m_PreferredBlockSize(preferredBlockSize),
5123 m_MinBlockCount(minBlockCount),
5124 m_MaxBlockCount(maxBlockCount),
5125 m_BufferImageGranularity(bufferImageGranularity),
5126 m_FrameInUseCount(frameInUseCount),
5127 m_IsCustomPool(isCustomPool),
5128 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5129 m_HasEmptyBlock(false),
5130 m_pDefragmentator(VMA_NULL)
5134 VmaBlockVector::~VmaBlockVector()
5136 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5138 for(
size_t i = m_Blocks.size(); i--; )
5140 m_Blocks[i]->Destroy(m_hAllocator);
5141 vma_delete(m_hAllocator, m_Blocks[i]);
5145 VkResult VmaBlockVector::CreateMinBlocks()
5147 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5149 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5150 if(res != VK_SUCCESS)
5158 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5165 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5167 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5169 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5171 VMA_HEAVY_ASSERT(pBlock->Validate());
5173 const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
5175 pStats->
size += pBlock->m_Size;
5182 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
5184 VkResult VmaBlockVector::Allocate(
5185 VmaPool hCurrentPool,
5186 uint32_t currentFrameIndex,
5187 const VkMemoryRequirements& vkMemReq,
5189 VmaSuballocationType suballocType,
5190 VmaAllocation* pAllocation)
5194 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5196 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5197 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5200 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5204 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5206 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5207 VMA_ASSERT(pCurrBlock);
5208 VmaAllocationRequest currRequest = {};
5209 if(pCurrBlock->CreateAllocationRequest(
5212 m_BufferImageGranularity,
5220 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
5223 if(pCurrBlock->IsEmpty())
5225 m_HasEmptyBlock =
false;
5228 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5229 pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5230 (*pAllocation)->InitBlockAllocation(
5239 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5240 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5245 const bool canCreateNewBlock =
5247 (m_Blocks.size() < m_MaxBlockCount);
5250 if(canCreateNewBlock)
5253 VkDeviceSize blockSize = m_PreferredBlockSize;
5254 size_t newBlockIndex = 0;
5255 VkResult res = CreateBlock(blockSize, &newBlockIndex);
5258 if(res < 0 && m_IsCustomPool ==
false)
5262 if(blockSize >= vkMemReq.size)
5264 res = CreateBlock(blockSize, &newBlockIndex);
5269 if(blockSize >= vkMemReq.size)
5271 res = CreateBlock(blockSize, &newBlockIndex);
5276 if(res == VK_SUCCESS)
5278 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5279 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
5282 VmaAllocationRequest allocRequest = {};
5283 allocRequest.item = pBlock->m_Suballocations.begin();
5284 allocRequest.offset = 0;
5285 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5286 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5287 (*pAllocation)->InitBlockAllocation(
5290 allocRequest.offset,
5296 VMA_HEAVY_ASSERT(pBlock->Validate());
5297 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
5306 if(canMakeOtherLost)
5308 uint32_t tryIndex = 0;
5309 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5311 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5312 VmaAllocationRequest bestRequest = {};
5313 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5317 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5319 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5320 VMA_ASSERT(pCurrBlock);
5321 VmaAllocationRequest currRequest = {};
5322 if(pCurrBlock->CreateAllocationRequest(
5325 m_BufferImageGranularity,
5332 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5333 if(pBestRequestBlock == VMA_NULL ||
5334 currRequestCost < bestRequestCost)
5336 pBestRequestBlock = pCurrBlock;
5337 bestRequest = currRequest;
5338 bestRequestCost = currRequestCost;
5340 if(bestRequestCost == 0)
5348 if(pBestRequestBlock != VMA_NULL)
5350 if(pBestRequestBlock->MakeRequestedAllocationsLost(
5356 if(pBestRequestBlock->IsEmpty())
5358 m_HasEmptyBlock =
false;
5361 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5362 pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5363 (*pAllocation)->InitBlockAllocation(
5372 VMA_HEAVY_ASSERT(pBlock->Validate());
5373 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5387 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5389 return VK_ERROR_TOO_MANY_OBJECTS;
5393 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5396 void VmaBlockVector::Free(
5397 VmaAllocation hAllocation)
5399 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
5403 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5405 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5407 pBlock->Free(hAllocation);
5408 VMA_HEAVY_ASSERT(pBlock->Validate());
5410 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
5413 if(pBlock->IsEmpty())
5416 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5418 pBlockToDelete = pBlock;
5424 m_HasEmptyBlock =
true;
5428 IncrementallySortBlocks();
5433 if(pBlockToDelete != VMA_NULL)
5435 VMA_DEBUG_LOG(
" Deleted empty allocation");
5436 pBlockToDelete->Destroy(m_hAllocator);
5437 vma_delete(m_hAllocator, pBlockToDelete);
5441 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5443 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5445 if(m_Blocks[blockIndex] == pBlock)
5447 VmaVectorRemove(m_Blocks, blockIndex);
5454 void VmaBlockVector::IncrementallySortBlocks()
5457 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5459 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
5461 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
5467 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5469 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5470 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5471 allocInfo.allocationSize = blockSize;
5472 const VkDevice hDevice = m_hAllocator->m_hDevice;
5473 VkDeviceMemory mem = VK_NULL_HANDLE;
5474 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
5483 void* pMappedData = VMA_NULL;
5484 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5485 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5487 res = vkMapMemory(hDevice, mem, 0, VK_WHOLE_SIZE, 0, &pMappedData);
5490 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5491 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5497 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5500 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5502 allocInfo.allocationSize,
5506 m_Blocks.push_back(pBlock);
5507 if(pNewBlockIndex != VMA_NULL)
5509 *pNewBlockIndex = m_Blocks.size() - 1;
5515 #if VMA_STATS_STRING_ENABLED 5517 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5519 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5525 json.WriteString(
"MemoryTypeIndex");
5526 json.WriteNumber(m_MemoryTypeIndex);
5528 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5530 json.WriteString(
"Mapped");
5531 json.WriteBool(
true);
5534 json.WriteString(
"BlockSize");
5535 json.WriteNumber(m_PreferredBlockSize);
5537 json.WriteString(
"BlockCount");
5538 json.BeginObject(
true);
5539 if(m_MinBlockCount > 0)
5541 json.WriteString(
"Min");
5542 json.WriteNumber(m_MinBlockCount);
5544 if(m_MaxBlockCount < SIZE_MAX)
5546 json.WriteString(
"Max");
5547 json.WriteNumber(m_MaxBlockCount);
5549 json.WriteString(
"Cur");
5550 json.WriteNumber(m_Blocks.size());
5553 if(m_FrameInUseCount > 0)
5555 json.WriteString(
"FrameInUseCount");
5556 json.WriteNumber(m_FrameInUseCount);
5561 json.WriteString(
"PreferredBlockSize");
5562 json.WriteNumber(m_PreferredBlockSize);
5565 json.WriteString(
"Blocks");
5567 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5569 m_Blocks[i]->PrintDetailedMap(json);
5576 #endif // #if VMA_STATS_STRING_ENABLED 5578 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5580 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5582 for(
size_t i = m_Blocks.size(); i--; )
5584 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5585 if(pBlock->m_pMappedData != VMA_NULL)
5587 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5588 vkUnmapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5589 pBlock->m_pMappedData = VMA_NULL;
5594 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5596 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5598 VkResult finalResult = VK_SUCCESS;
5599 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5601 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5602 if(pBlock->m_PersistentMap)
5604 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5605 VkResult localResult = vkMapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &pBlock->m_pMappedData);
5606 if(localResult != VK_SUCCESS)
5608 finalResult = localResult;
5615 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5617 const VkAllocationCallbacks* pAllocationCallbacks,
5618 uint32_t currentFrameIndex)
5620 if(m_pDefragmentator == VMA_NULL)
5622 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5624 pAllocationCallbacks,
5629 return m_pDefragmentator;
5632 VkResult VmaBlockVector::Defragment(
5634 VkDeviceSize& maxBytesToMove,
5635 uint32_t& maxAllocationsToMove)
5637 if(m_pDefragmentator == VMA_NULL)
5642 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5645 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5648 if(pDefragmentationStats != VMA_NULL)
5650 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5651 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
5654 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5655 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
5661 m_HasEmptyBlock =
false;
5662 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5664 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5665 if(pBlock->IsEmpty())
5667 if(m_Blocks.size() > m_MinBlockCount)
5669 if(pDefragmentationStats != VMA_NULL)
5672 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5675 VmaVectorRemove(m_Blocks, blockIndex);
5676 pBlock->Destroy(m_hAllocator);
5677 vma_delete(m_hAllocator, pBlock);
5681 m_HasEmptyBlock =
true;
5689 void VmaBlockVector::DestroyDefragmentator()
5691 if(m_pDefragmentator != VMA_NULL)
5693 vma_delete(m_hAllocator, m_pDefragmentator);
5694 m_pDefragmentator = VMA_NULL;
5698 void VmaBlockVector::MakePoolAllocationsLost(
5699 uint32_t currentFrameIndex,
5700 size_t* pLostAllocationCount)
5702 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5704 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5706 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5708 pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
5712 void VmaBlockVector::AddStats(
VmaStats* pStats)
5714 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5715 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5717 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5719 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5721 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5723 VMA_HEAVY_ASSERT(pBlock->Validate());
5725 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
5726 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5727 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5728 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
5735 VmaDefragmentator::VmaDefragmentator(
5737 const VkAllocationCallbacks* pAllocationCallbacks,
5738 VmaBlockVector* pBlockVector,
5739 uint32_t currentFrameIndex) :
5741 m_pAllocationCallbacks(pAllocationCallbacks),
5742 m_pBlockVector(pBlockVector),
5743 m_CurrentFrameIndex(currentFrameIndex),
5745 m_AllocationsMoved(0),
5746 m_Allocations(VmaStlAllocator<AllocationInfo>(pAllocationCallbacks)),
5747 m_Blocks(VmaStlAllocator<BlockInfo*>(pAllocationCallbacks))
5751 VmaDefragmentator::~VmaDefragmentator()
5753 for(
size_t i = m_Blocks.size(); i--; )
5755 vma_delete(m_pAllocationCallbacks, m_Blocks[i]);
5759 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5761 AllocationInfo allocInfo;
5762 allocInfo.m_hAllocation = hAlloc;
5763 allocInfo.m_pChanged = pChanged;
5764 m_Allocations.push_back(allocInfo);
5767 VkResult VmaDefragmentator::DefragmentRound(
5768 VkDeviceSize maxBytesToMove,
5769 uint32_t maxAllocationsToMove)
5771 if(m_Blocks.empty())
5776 size_t srcBlockIndex = m_Blocks.size() - 1;
5777 size_t srcAllocIndex = SIZE_MAX;
5783 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5785 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
5788 if(srcBlockIndex == 0)
5795 srcAllocIndex = SIZE_MAX;
5800 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5804 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5805 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5807 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5808 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5809 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5810 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
5813 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5815 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5816 VmaAllocationRequest dstAllocRequest;
5817 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
5818 m_CurrentFrameIndex,
5819 m_pBlockVector->GetFrameInUseCount(),
5820 m_pBlockVector->GetBufferImageGranularity(),
5825 &dstAllocRequest) &&
5827 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5829 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
5832 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5833 (m_BytesMoved + size > maxBytesToMove))
5835 return VK_INCOMPLETE;
5838 void* pDstMappedData = VMA_NULL;
5839 VkResult res = pDstBlockInfo->EnsureMapping(m_hDevice, &pDstMappedData);
5840 if(res != VK_SUCCESS)
5845 void* pSrcMappedData = VMA_NULL;
5846 res = pSrcBlockInfo->EnsureMapping(m_hDevice, &pSrcMappedData);
5847 if(res != VK_SUCCESS)
5854 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
5855 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
5856 static_cast<size_t>(size));
5858 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
5859 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
5861 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
5863 if(allocInfo.m_pChanged != VMA_NULL)
5865 *allocInfo.m_pChanged = VK_TRUE;
5868 ++m_AllocationsMoved;
5869 m_BytesMoved += size;
5871 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
5879 if(srcAllocIndex > 0)
5885 if(srcBlockIndex > 0)
5888 srcAllocIndex = SIZE_MAX;
5898 VkResult VmaDefragmentator::Defragment(
5899 VkDeviceSize maxBytesToMove,
5900 uint32_t maxAllocationsToMove)
5902 if(m_Allocations.empty())
5908 const size_t blockCount = m_pBlockVector->m_Blocks.size();
5909 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5911 BlockInfo* pBlockInfo = vma_new(m_pAllocationCallbacks, BlockInfo)(m_pAllocationCallbacks);
5912 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
5913 m_Blocks.push_back(pBlockInfo);
5917 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
5920 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
5922 AllocationInfo& allocInfo = m_Allocations[blockIndex];
5924 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
5926 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
5927 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
5928 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
5930 (*it)->m_Allocations.push_back(allocInfo);
5938 m_Allocations.clear();
5940 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5942 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
5943 pBlockInfo->CalcHasNonMovableAllocations();
5944 pBlockInfo->SortAllocationsBySizeDescecnding();
5948 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
5951 VkResult result = VK_SUCCESS;
5952 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
5954 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
5958 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5960 m_Blocks[blockIndex]->Unmap(m_hDevice);
5966 bool VmaDefragmentator::MoveMakesSense(
5967 size_t dstBlockIndex, VkDeviceSize dstOffset,
5968 size_t srcBlockIndex, VkDeviceSize srcOffset)
5970 if(dstBlockIndex < srcBlockIndex)
5974 if(dstBlockIndex > srcBlockIndex)
5978 if(dstOffset < srcOffset)
5990 m_PhysicalDevice(pCreateInfo->physicalDevice),
5991 m_hDevice(pCreateInfo->device),
5992 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
5993 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
5994 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
5995 m_UnmapPersistentlyMappedMemoryCounter(0),
5996 m_PreferredLargeHeapBlockSize(0),
5997 m_PreferredSmallHeapBlockSize(0),
5998 m_CurrentFrameIndex(0),
5999 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6003 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6004 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6005 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6007 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6008 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
6010 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6012 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6021 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6022 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
6031 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6033 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6034 if(limit != VK_WHOLE_SIZE)
6036 m_HeapSizeLimit[heapIndex] = limit;
6037 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6039 m_MemProps.memoryHeaps[heapIndex].size = limit;
6045 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6047 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6049 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6051 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6054 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6058 GetBufferImageGranularity(),
6063 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6068 VmaAllocator_T::~VmaAllocator_T()
6070 VMA_ASSERT(m_Pools.empty());
6072 for(
size_t i = GetMemoryTypeCount(); i--; )
6074 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6076 vma_delete(
this, m_pOwnAllocations[i][j]);
6077 vma_delete(
this, m_pBlockVectors[i][j]);
6082 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6084 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6085 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6086 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6087 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// VmaAllocator_T::AllocateMemoryOfType -- allocates memory of one specific Vulkan
// memory type. Tries the type's VmaBlockVector first; large requests (> half the
// preferred block size, per the visible condition fragment) or OWN_MEMORY requests
// go through AllocateOwnMemory as a dedicated allocation.
// NOTE(review): this region is a garbled extraction. Several original lines are
// missing between the numbered fragments (parameter `createInfo`, the full
// ownMemory condition, call arguments, braces, returns). Do not edit without
// recovering the complete original text.
6090 VkResult VmaAllocator_T::AllocateMemoryOfType(
6091 const VkMemoryRequirements& vkMemReq,
6093 uint32_t memTypeIndex,
6094 VmaSuballocationType suballocType,
6095 VmaAllocation* pAllocation)
6097 VMA_ASSERT(pAllocation != VMA_NULL);
6098 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Flags select the mapped vs. unmapped block vector of this memory type.
6100 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6101 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6102 VMA_ASSERT(blockVector);
6104 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Dedicated allocation heuristic (condition partially lost in extraction).
6106 const bool ownMemory =
6108 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6110 vkMemReq.size > preferredBlockSize / 2);
6116 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6120 return AllocateOwnMemory(
// Normal path: suballocate from the block vector; fall back to own memory.
6131 VkResult res = blockVector->Allocate(
6133 m_CurrentFrameIndex.load(),
6138 if(res == VK_SUCCESS)
6144 res = AllocateOwnMemory(
6149 createInfo.pUserData,
6151 if(res == VK_SUCCESS)
6154 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6160 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// VmaAllocator_T::AllocateOwnMemory -- creates a dedicated VkDeviceMemory for a
// single allocation: vkAllocateMemory, optional persistent map, registration in
// the sorted per-type own-allocations vector.
// NOTE(review): garbled extraction; parameters `size`, `map`, `pUserData` and
// several branch bodies/braces are missing between the numbered fragments.
6166 VkResult VmaAllocator_T::AllocateOwnMemory(
6168 VmaSuballocationType suballocType,
6169 uint32_t memTypeIndex,
6172 VmaAllocation* pAllocation)
6174 VMA_ASSERT(pAllocation);
6176 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6177 allocInfo.memoryTypeIndex = memTypeIndex;
6178 allocInfo.allocationSize = size;
6181 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6182 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6185 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping is skipped while globally unmapped (counter != 0).
6189 void* pMappedData =
nullptr;
6192 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6194 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
// On map failure the fresh device memory is released again.
6197 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6198 FreeVulkanMemory(memTypeIndex, size, hMemory);
6204 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6205 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
// Register under the per-type mutex; vector is kept sorted for binary search.
6209 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6210 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6211 VMA_ASSERT(pOwnAllocations);
6212 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6215 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
// VmaAllocator_T::AllocateMemory -- top-level allocation entry point: validates
// flag combinations, dispatches to a custom pool if one is given, otherwise finds
// a suitable memory type and retries with the next candidate type on failure.
// NOTE(review): garbled extraction; the `createInfo` parameter, the flag-test
// conditions, the vmaFindMemoryTypeIndex calls and braces are missing between
// the numbered fragments.
6220 VkResult VmaAllocator_T::AllocateMemory(
6221 const VkMemoryRequirements& vkMemReq,
6223 VmaSuballocationType suballocType,
6224 VmaAllocation* pAllocation)
// OWN_MEMORY + NEVER_ALLOCATE is contradictory -- rejected up front.
6229 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6230 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6232 if((createInfo.
pool != VK_NULL_HANDLE) &&
6235 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6236 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool: delegate directly to the pool's block vector.
6239 if(createInfo.
pool != VK_NULL_HANDLE)
6241 return createInfo.
pool->m_BlockVector.Allocate(
6243 m_CurrentFrameIndex.load(),
// Default pools: iterate candidate memory types, masking out each failed one.
6252 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6253 uint32_t memTypeIndex = UINT32_MAX;
6255 if(res == VK_SUCCESS)
6257 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6259 if(res == VK_SUCCESS)
6269 memoryTypeBits &= ~(1u << memTypeIndex);
6272 if(res == VK_SUCCESS)
6274 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6276 if(res == VK_SUCCESS)
6286 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// VmaAllocator_T::FreeMemory -- frees an allocation: block suballocations go back
// to their (pool or default) block vector, own allocations through FreeOwnMemory,
// then the VmaAllocation_T object itself is deleted.
// NOTE(review): garbled extraction; the branch for already-lost allocations and
// several braces/else lines are missing between the numbered fragments.
6297 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6299 VMA_ASSERT(allocation);
// Lost allocations no longer own device memory -- only the handle is deleted.
6301 if(allocation->CanBecomeLost() ==
false ||
6302 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6304 switch(allocation->GetType())
6306 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6308 VmaBlockVector* pBlockVector = VMA_NULL;
6309 VmaPool hPool = allocation->GetPool();
6310 if(hPool != VK_NULL_HANDLE)
6312 pBlockVector = &hPool->m_BlockVector;
// else-branch (default pools) -- fragment below:
6316 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6317 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6318 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6320 pBlockVector->Free(allocation);
6323 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6324 FreeOwnMemory(allocation);
6331 vma_delete(
this, allocation);
// VmaAllocator_T::CalculateStats -- aggregates statistics over default block
// vectors, custom pools, and own allocations into pStats (total / per-type /
// per-heap), then post-processes averages.
// NOTE(review): garbled extraction; the InitStatInfo loop bodies, the
// allocationStatInfo declaration and braces are missing between fragments.
6334 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero all accumulators first.
6337 InitStatInfo(pStats->
total);
6338 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6340 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors (both mapped and unmapped variants).
6344 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6346 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6347 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6349 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6350 VMA_ASSERT(pBlockVector);
6351 pBlockVector->AddStats(pStats);
// Custom pools, under the pools mutex.
6357 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6358 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6360 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Own (dedicated) allocations, under the per-type mutex.
6365 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6367 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6368 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6369 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6371 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6372 VMA_ASSERT(pOwnAllocVector);
6373 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6376 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6377 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6378 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6379 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages etc. after all sums are in.
6385 VmaPostprocessCalcStatInfo(pStats->
total);
6386 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6387 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6388 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6389 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (0x1002 == 4098 decimal). Used below to gate the
// unmap/remap handling of HOST_VISIBLE + DEVICE_LOCAL memory types, which is
// only performed on AMD hardware (see vendorID checks in the two functions below).
6392 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// VmaAllocator_T::UnmapPersistentlyMappedMemory -- reference-counted global unmap
// of persistently mapped memory. Only the first call (counter 0 -> 1) does work,
// and only on AMD hardware, for memory types that are both HOST_VISIBLE and
// DEVICE_LOCAL: own allocations, default mapped block vectors, and custom pools.
// NOTE(review): garbled extraction; braces and some scope-closing lines are
// missing between the numbered fragments.
6394 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6396 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6398 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6400 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6402 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6403 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6404 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
// Own allocations of this type, under the per-type mutex.
6408 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6409 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6410 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6412 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6413 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(m_hDevice);
// Default mapped block vector of this type.
6419 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6420 pBlockVector->UnmapPersistentlyMappedMemory();
// Custom pools, under the pools mutex.
6427 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6428 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6430 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
// VmaAllocator_T::MapPersistentlyMappedMemory -- counterpart of the unmap above.
// Only the call that brings the counter back to 0 remaps; first custom pools,
// then own allocations and default block vectors of HOST_VISIBLE | DEVICE_LOCAL
// types (AMD-only, mirroring the unmap path). Collects the first failing result
// into finalResult.
// NOTE(review): garbled extraction; braces and the trailing `return finalResult;`
// (and the non-AMD return path) are missing between the numbered fragments.
6437 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6439 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6440 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6442 VkResult finalResult = VK_SUCCESS;
6443 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6447 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6448 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6450 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6454 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6456 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6457 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6458 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6462 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6463 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6464 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6466 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6467 hAlloc->OwnAllocMapPersistentlyMappedMemory(m_hDevice);
6473 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6474 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6475 if(localResult != VK_SUCCESS)
6477 finalResult = localResult;
6489 VkResult VmaAllocator_T::Defragment(
6490 VmaAllocation* pAllocations,
6491 size_t allocationCount,
6492 VkBool32* pAllocationsChanged,
6496 if(pAllocationsChanged != VMA_NULL)
6498 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6500 if(pDefragmentationStats != VMA_NULL)
6502 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6505 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6507 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6508 return VK_ERROR_MEMORY_MAP_FAILED;
6511 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6513 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6515 const size_t poolCount = m_Pools.size();
6518 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6520 VmaAllocation hAlloc = pAllocations[allocIndex];
6522 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6524 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6526 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6528 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6530 VmaBlockVector* pAllocBlockVector =
nullptr;
6532 const VmaPool hAllocPool = hAlloc->GetPool();
6534 if(hAllocPool != VK_NULL_HANDLE)
6536 pAllocBlockVector = &hAllocPool->GetBlockVector();
6541 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6544 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
6546 GetAllocationCallbacks(),
6549 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6550 &pAllocationsChanged[allocIndex] : VMA_NULL;
6551 pDefragmentator->AddAllocation(hAlloc, pChanged);
6555 VkResult result = VK_SUCCESS;
6559 VkDeviceSize maxBytesToMove = SIZE_MAX;
6560 uint32_t maxAllocationsToMove = UINT32_MAX;
6561 if(pDefragmentationInfo != VMA_NULL)
6568 for(uint32_t memTypeIndex = 0;
6569 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6573 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6575 for(uint32_t blockVectorType = 0;
6576 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6579 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6580 pDefragmentationStats,
6582 maxAllocationsToMove);
6588 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6590 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6591 pDefragmentationStats,
6593 maxAllocationsToMove);
6599 for(
size_t poolIndex = poolCount; poolIndex--; )
6601 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6605 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6607 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6609 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6611 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
// VmaAllocator_T::GetAllocationInfo -- fills pAllocationInfo. For allocations
// that can become lost it also "touches" the allocation: a compare-exchange
// loop on the last-use frame index, returning lost-state info (null memory,
// zero offset) when the allocation is already lost.
// NOTE(review): garbled extraction; the CAS retry loop structure, braces and
// some assignments (e.g. memoryType/deviceMemory in the lost branch) are
// missing between the numbered fragments.
6619 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6621 if(hAllocation->CanBecomeLost())
6627 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6628 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Branch 1: allocation already lost -- report size/pUserData only.
6631 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6635 pAllocationInfo->
offset = 0;
6636 pAllocationInfo->
size = hAllocation->GetSize();
6638 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Branch 2: already touched this frame -- report full info.
6641 else if(localLastUseFrameIndex == localCurrFrameIndex)
6643 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6644 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6645 pAllocationInfo->
offset = hAllocation->GetOffset();
6646 pAllocationInfo->
size = hAllocation->GetSize();
6647 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6648 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Branch 3: try to advance last-use frame index; retry on contention.
6653 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6655 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: plain field copy.
6663 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6664 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6665 pAllocationInfo->
offset = hAllocation->GetOffset();
6666 pAllocationInfo->
size = hAllocation->GetSize();
6667 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6668 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// VmaAllocator_T::CreatePool -- creates a custom pool, pre-creates its minimum
// blocks, and registers it in the sorted m_Pools vector.
// NOTE(review): garbled extraction; the lines building `newCreateInfo` from
// *pCreateInfo (original lines ~6676-6686), the error return after vma_delete,
// and the final return are missing between the numbered fragments.
6672 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6674 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6687 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Roll back pool creation if the minimum blocks cannot be allocated.
6689 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6690 if(res != VK_SUCCESS)
6692 vma_delete(
this, *pPool);
// Register the new pool under the pools mutex.
6699 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6700 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6706 void VmaAllocator_T::DestroyPool(VmaPool pool)
6710 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6711 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6712 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6715 vma_delete(
this, pool);
6718 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6720 pool->m_BlockVector.GetPoolStats(pPoolStats);
6723 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6725 m_CurrentFrameIndex.store(frameIndex);
6728 void VmaAllocator_T::MakePoolAllocationsLost(
6730 size_t* pLostAllocationCount)
6732 hPool->m_BlockVector.MakePoolAllocationsLost(
6733 m_CurrentFrameIndex.load(),
6734 pLostAllocationCount);
6737 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6739 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6740 (*pAllocation)->InitLost();
6743 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6745 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6748 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6750 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6751 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6753 res = vkAllocateMemory(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6754 if(res == VK_SUCCESS)
6756 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
6761 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6766 res = vkAllocateMemory(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6769 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6771 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
6777 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6779 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6781 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6784 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
6786 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
6787 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6789 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6790 m_HeapSizeLimit[heapIndex] += size;
6794 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6796 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6798 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6800 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6801 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
6802 VMA_ASSERT(pOwnAllocations);
6803 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
6804 VMA_ASSERT(success);
6807 VkDeviceMemory hMemory = allocation->GetMemory();
6809 if(allocation->GetMappedData() != VMA_NULL)
6811 vkUnmapMemory(m_hDevice, hMemory);
6814 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
6816 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
// VmaAllocator_T::PrintDetailedMap -- writes the detailed JSON map of all
// allocations into `json`: "OwnAllocations" (dedicated), "DefaultPools"
// (per-type block vectors), and "Pools" (custom pools).
// NOTE(review): garbled extraction; json.BeginObject/EndObject/EndArray calls,
// braces, and some loop bodies are missing between the numbered fragments.
6819 #if VMA_STATS_STRING_ENABLED 6821 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: dedicated ("own") allocations, grouped by type and mapped-ness.
6823 bool ownAllocationsStarted =
false;
6824 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6826 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6827 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6829 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6830 VMA_ASSERT(pOwnAllocVector);
6831 if(pOwnAllocVector->empty() ==
false)
// Header written lazily, only when the first non-empty vector is found.
6833 if(ownAllocationsStarted ==
false)
6835 ownAllocationsStarted =
true;
6836 json.WriteString(
"OwnAllocations");
6840 json.BeginString(
"Type ");
6841 json.ContinueString(memTypeIndex);
6842 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6844 json.ContinueString(
" Mapped");
6850 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
6852 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
6853 json.BeginObject(
true);
6855 json.WriteString(
"Size");
6856 json.WriteNumber(hAlloc->GetSize());
6858 json.WriteString(
"Type");
6859 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
6868 if(ownAllocationsStarted)
// Section 2: default per-type block vectors.
6874 bool allocationsStarted =
false;
6875 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6877 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6879 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
6881 if(allocationsStarted ==
false)
6883 allocationsStarted =
true;
6884 json.WriteString(
"DefaultPools");
6888 json.BeginString(
"Type ");
6889 json.ContinueString(memTypeIndex);
6890 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6892 json.ContinueString(
" Mapped");
6896 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
6900 if(allocationsStarted)
// Section 3: custom pools, under the pools mutex.
6907 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6908 const size_t poolCount = m_Pools.size();
6911 json.WriteString(
"Pools");
6913 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
6915 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// AllocateMemoryForImage -- internal helper: queries the image's memory
// requirements and forwards to VmaAllocator_T::AllocateMemory.
// NOTE(review): garbled extraction; the `VkImage image` and
// `const VmaAllocationCreateInfo* pAllocationCreateInfo` parameters and some
// call arguments are missing between the numbered fragments.
6922 #endif // #if VMA_STATS_STRING_ENABLED 6924 static VkResult AllocateMemoryForImage(
6925 VmaAllocator allocator,
6928 VmaSuballocationType suballocType,
6929 VmaAllocation* pAllocation)
6931 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
6933 VkMemoryRequirements vkMemReq = {};
6934 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
6936 return allocator->AllocateMemory(
6938 *pAllocationCreateInfo,
// Public C API wrappers: vmaCreateAllocator, vmaDestroyAllocator,
// vmaGetPhysicalDeviceProperties, vmaGetMemoryProperties,
// vmaGetMemoryTypeProperties, vmaSetCurrentFrameIndex, vmaCalculateStats.
// NOTE(review): the extraction dropped most function signature lines and bodies;
// only parameter tails and body fragments remain. Each wrapper validates its
// arguments and forwards to the VmaAllocator_T implementation.
6948 VmaAllocator* pAllocator)
6950 VMA_ASSERT(pCreateInfo && pAllocator);
6951 VMA_DEBUG_LOG(
"vmaCreateAllocator");
// vmaDestroyAllocator: copies the callbacks out first, since the allocator
// object (which holds them) is destroyed by the vma_delete call.
6957 VmaAllocator allocator)
6959 if(allocator != VK_NULL_HANDLE)
6961 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
6962 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
6963 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer into the allocator.
6968 VmaAllocator allocator,
6969 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
6971 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
6972 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: returns a pointer into the allocator.
6976 VmaAllocator allocator,
6977 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
6979 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
6980 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: bounds-checked property-flags lookup.
6984 VmaAllocator allocator,
6985 uint32_t memoryTypeIndex,
6986 VkMemoryPropertyFlags* pFlags)
6988 VMA_ASSERT(allocator && pFlags);
6989 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
6990 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved and rejected.
6994 VmaAllocator allocator,
6995 uint32_t frameIndex)
6997 VMA_ASSERT(allocator);
6998 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7000 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7002 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats.
7006 VmaAllocator allocator,
7009 VMA_ASSERT(allocator && pStats);
7010 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7011 allocator->CalculateStats(pStats);
// vmaBuildStatsString / vmaFreeStatsString -- builds a JSON statistics string
// (total stats, then per-heap and per-type sections, optionally the detailed
// map) into a heap-allocated, NUL-terminated buffer the caller must release
// with vmaFreeStatsString.
// NOTE(review): garbled extraction; signature lines, Begin/EndObject calls,
// braces and the terminating NUL write are missing between the fragments.
7014 #if VMA_STATS_STRING_ENABLED 7017 VmaAllocator allocator,
7018 char** ppStatsString,
7019 VkBool32 detailedMap)
7021 VMA_ASSERT(allocator && ppStatsString);
7022 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7024 VmaStringBuilder sb(allocator);
7026 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7030 allocator->CalculateStats(&stats);
7032 json.WriteString(
"Total");
7033 VmaPrintStatInfo(json, stats.
total);
// Per-heap section, with nested per-type entries.
7035 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7037 json.BeginString(
"Heap ");
7038 json.ContinueString(heapIndex);
7042 json.WriteString(
"Size");
7043 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7045 json.WriteString(
"Flags");
7046 json.BeginArray(
true);
7047 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7049 json.WriteString(
"DEVICE_LOCAL");
7055 json.WriteString(
"Stats");
7056 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7059 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7061 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7063 json.BeginString(
"Type ");
7064 json.ContinueString(typeIndex);
7069 json.WriteString(
"Flags");
7070 json.BeginArray(
true);
7071 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7072 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7074 json.WriteString(
"DEVICE_LOCAL");
7076 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7078 json.WriteString(
"HOST_VISIBLE");
7080 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7082 json.WriteString(
"HOST_COHERENT");
7084 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7086 json.WriteString(
"HOST_CACHED");
7088 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7090 json.WriteString(
"LAZILY_ALLOCATED");
7096 json.WriteString(
"Stats");
7097 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7106 if(detailedMap == VK_TRUE)
7108 allocator->PrintDetailedMap(json);
// Copy the builder's contents into a caller-owned NUL-terminated buffer.
7114 const size_t len = sb.GetLength();
7115 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7118 memcpy(pChars, sb.GetData(), len);
7121 *ppStatsString = pChars;
// vmaFreeStatsString: frees the buffer produced above (NUL byte included in
// the array length).
7125 VmaAllocator allocator,
7128 if(pStatsString != VMA_NULL)
7130 VMA_ASSERT(allocator);
7131 size_t len = strlen(pStatsString);
7132 vma_delete_array(allocator, pStatsString, len + 1);
// vmaFindMemoryTypeIndex -- picks the best memory type allowed by
// memoryTypeBits: required flags must all be present; among candidates the one
// missing the fewest preferred flags (lowest bit-count "cost") wins.
// NOTE(review): garbled extraction; the `pAllocationCreateInfo` parameter line,
// the `preferredFlags` initialization, the VMA_MEMORY_USAGE_* case labels and
// the `minCost = currCost` / early-exit lines are missing between fragments.
7136 #endif // #if VMA_STATS_STRING_ENABLED 7141 VmaAllocator allocator,
7142 uint32_t memoryTypeBits,
7144 uint32_t* pMemoryTypeIndex)
7146 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7147 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7148 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7150 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
// preferredFlags defaults to requiredFlags and must be a superset of them.
7152 if(preferredFlags == 0)
7154 preferredFlags = requiredFlags;
7157 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
// Usage enum adds implicit required/preferred flags (case labels lost in
// extraction; fragments below correspond to GPU_ONLY, CPU_ONLY, CPU_TO_GPU,
// GPU_TO_CPU respectively).
7160 switch(pAllocationCreateInfo->
usage)
7165 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7168 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7171 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7172 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7175 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7176 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
// Persistent-map requests imply HOST_VISIBLE.
7184 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Cost-minimizing search over the allowed memory types.
7187 *pMemoryTypeIndex = UINT32_MAX;
7188 uint32_t minCost = UINT32_MAX;
7189 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7190 memTypeIndex < allocator->GetMemoryTypeCount();
7191 ++memTypeIndex, memTypeBit <<= 1)
7194 if((memTypeBit & memoryTypeBits) != 0)
7196 const VkMemoryPropertyFlags currFlags =
7197 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7199 if((requiredFlags & ~currFlags) == 0)
7202 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7204 if(currCost < minCost)
7206 *pMemoryTypeIndex = memTypeIndex;
7216 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// Public C API wrappers for pools: vmaCreatePool, vmaDestroyPool,
// vmaGetPoolStats, vmaMakePoolAllocationsLost. Each validates arguments, takes
// the optional global debug mutex, and forwards to VmaAllocator_T.
// NOTE(review): signature lines were dropped by the extraction.
7220 VmaAllocator allocator,
7224 VMA_ASSERT(allocator && pCreateInfo && pPool);
7226 VMA_DEBUG_LOG(
"vmaCreatePool");
7228 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7230 return allocator->CreatePool(pCreateInfo, pPool);
// vmaDestroyPool.
7234 VmaAllocator allocator,
7237 VMA_ASSERT(allocator && pool);
7239 VMA_DEBUG_LOG(
"vmaDestroyPool");
7241 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7243 allocator->DestroyPool(pool);
// vmaGetPoolStats.
7247 VmaAllocator allocator,
7251 VMA_ASSERT(allocator && pool && pPoolStats);
7253 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7255 allocator->GetPoolStats(pool, pPoolStats);
// vmaMakePoolAllocationsLost.
7259 VmaAllocator allocator,
7261 size_t* pLostAllocationCount)
7263 VMA_ASSERT(allocator && pool);
7265 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7267 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// Public C API wrappers for allocations: vmaAllocateMemory,
// vmaAllocateMemoryForBuffer, vmaAllocateMemoryForImage, vmaFreeMemory,
// vmaGetAllocationInfo, vmaSetAllocationUserData, vmaCreateLostAllocation,
// vmaMapMemory, vmaUnmapMemory, vmaUnmapPersistentlyMappedMemory,
// vmaMapPersistentlyMappedMemory.
// NOTE(review): the extraction dropped most signature lines and some argument
// lists; only parameter tails and body fragments remain.
7271 VmaAllocator allocator,
7272 const VkMemoryRequirements* pVkMemoryRequirements,
7274 VmaAllocation* pAllocation,
7277 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7279 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7281 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7283 VkResult result = allocator->AllocateMemory(
7284 *pVkMemoryRequirements,
7286 VMA_SUBALLOCATION_TYPE_UNKNOWN,
// pAllocationInfo is optional; filled only on success.
7289 if(pAllocationInfo && result == VK_SUCCESS)
7291 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForBuffer: queries buffer requirements first.
7298 VmaAllocator allocator,
7301 VmaAllocation* pAllocation,
7304 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7306 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7308 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7310 VkMemoryRequirements vkMemReq = {};
7311 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
7313 VkResult result = allocator->AllocateMemory(
7316 VMA_SUBALLOCATION_TYPE_BUFFER,
7319 if(pAllocationInfo && result == VK_SUCCESS)
7321 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForImage: delegates to the internal helper.
7328 VmaAllocator allocator,
7331 VmaAllocation* pAllocation,
7334 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7336 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7338 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7340 VkResult result = AllocateMemoryForImage(
7344 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7347 if(pAllocationInfo && result == VK_SUCCESS)
7349 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaFreeMemory.
7356 VmaAllocator allocator,
7357 VmaAllocation allocation)
7359 VMA_ASSERT(allocator && allocation);
7361 VMA_DEBUG_LOG(
"vmaFreeMemory");
7363 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7365 allocator->FreeMemory(allocation);
// vmaGetAllocationInfo.
7369 VmaAllocator allocator,
7370 VmaAllocation allocation,
7373 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7375 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7377 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// vmaSetAllocationUserData.
7381 VmaAllocator allocator,
7382 VmaAllocation allocation,
7385 VMA_ASSERT(allocator && allocation);
7387 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7389 allocation->SetUserData(pUserData);
// vmaCreateLostAllocation.
7393 VmaAllocator allocator,
7394 VmaAllocation* pAllocation)
7396 VMA_ASSERT(allocator && pAllocation);
7398 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7400 allocator->CreateLostAllocation(pAllocation);
// vmaMapMemory: maps exactly the allocation's sub-range of the device memory.
7404 VmaAllocator allocator,
7405 VmaAllocation allocation,
7408 VMA_ASSERT(allocator && allocation && ppData);
7410 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7412 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7413 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
// vmaUnmapMemory.
7417 VmaAllocator allocator,
7418 VmaAllocation allocation)
7420 VMA_ASSERT(allocator && allocation);
7422 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7424 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
// vmaUnmapPersistentlyMappedMemory.
7429 VMA_ASSERT(allocator);
7431 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7433 allocator->UnmapPersistentlyMappedMemory();
// vmaMapPersistentlyMappedMemory.
7438 VMA_ASSERT(allocator);
7440 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7442 return allocator->MapPersistentlyMappedMemory();
// vmaDefragment -- public wrapper forwarding to VmaAllocator_T::Defragment.
// NOTE(review): the signature line and the pDefragmentationInfo /
// pDefragmentationStats parameter lines were dropped by the extraction.
7446 VmaAllocator allocator,
7447 VmaAllocation* pAllocations,
7448 size_t allocationCount,
7449 VkBool32* pAllocationsChanged,
7453 VMA_ASSERT(allocator && pAllocations);
7455 VMA_DEBUG_LOG(
"vmaDefragment");
7457 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7459 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
// vmaCreateBuffer / vmaDestroyBuffer -- create a VkBuffer, allocate and bind
// memory for it; on any failure the partial results are rolled back and the
// outputs reset to VK_NULL_HANDLE. Destroy releases both buffer and memory.
// NOTE(review): garbled extraction; signature lines, result checks
// (`if(res >= 0)`) and the final `return res;` are missing between fragments.
7463 VmaAllocator allocator,
7464 const VkBufferCreateInfo* pBufferCreateInfo,
7467 VmaAllocation* pAllocation,
7470 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7472 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7474 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7476 *pBuffer = VK_NULL_HANDLE;
7477 *pAllocation = VK_NULL_HANDLE;
// Step 1: create the buffer.
7480 VkResult res = vkCreateBuffer(allocator->m_hDevice, pBufferCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
7484 VkMemoryRequirements vkMemReq = {};
7485 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
// Step 2: allocate memory for it.
7488 res = allocator->AllocateMemory(
7490 *pAllocationCreateInfo,
7491 VMA_SUBALLOCATION_TYPE_BUFFER,
// Step 3: bind; on failure the rollback fragments below free allocation/buffer.
7496 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
7500 if(pAllocationInfo != VMA_NULL)
7502 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7506 allocator->FreeMemory(*pAllocation);
7507 *pAllocation = VK_NULL_HANDLE;
7510 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7511 *pBuffer = VK_NULL_HANDLE;
// vmaDestroyBuffer: no-op for VK_NULL_HANDLE buffer.
7518 VmaAllocator allocator,
7520 VmaAllocation allocation)
7522 if(buffer != VK_NULL_HANDLE)
7524 VMA_ASSERT(allocator);
7526 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7528 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7530 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7532 allocator->FreeMemory(allocation);
// vmaCreateImage / vmaDestroyImage -- image analogues of the buffer pair above:
// create image, allocate (suballocation type chosen by tiling), bind, roll back
// on failure. Destroy releases both image and memory.
// NOTE(review): garbled extraction; signature lines, result checks and the
// final `return res;` are missing between the numbered fragments.
7537 VmaAllocator allocator,
7538 const VkImageCreateInfo* pImageCreateInfo,
7541 VmaAllocation* pAllocation,
7544 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7546 VMA_DEBUG_LOG(
"vmaCreateImage");
7548 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7550 *pImage = VK_NULL_HANDLE;
7551 *pAllocation = VK_NULL_HANDLE;
// Step 1: create the image.
7554 VkResult res = vkCreateImage(allocator->m_hDevice, pImageCreateInfo, allocator->GetAllocationCallbacks(), pImage);
// OPTIMAL vs LINEAR tiling selects the suballocation type (affects granularity
// conflict handling between linear and optimal resources).
7557 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7558 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7559 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
// Step 2: allocate; step 3: bind; rollback fragments follow.
7562 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7566 res = vkBindImageMemory(allocator->m_hDevice, *pImage, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
7570 if(pAllocationInfo != VMA_NULL)
7572 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7576 allocator->FreeMemory(*pAllocation);
7577 *pAllocation = VK_NULL_HANDLE;
7580 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7581 *pImage = VK_NULL_HANDLE;
// vmaDestroyImage: no-op for VK_NULL_HANDLE image.
7588 VmaAllocator allocator,
7590 VmaAllocation allocation)
7592 if(image != VK_NULL_HANDLE)
7594 VMA_ASSERT(allocator);
7596 VMA_DEBUG_LOG(
"vmaDestroyImage");
7598 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7600 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7602 allocator->FreeMemory(allocation);
7606 #endif // #ifdef VMA_IMPLEMENTATION VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:473
Definition: vk_mem_alloc.h:790
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:574
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:641
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:911
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1061
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:842
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:690
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:723
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:436
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:485
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:792
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:467
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:482
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:464
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1065
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:502
VmaStatInfo total
Definition: vk_mem_alloc.h:592
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1073
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:706
VkDeviceSize maxBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1056
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:476
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:796
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:921
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:725
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:812
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:848
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:799
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
Definition: vk_mem_alloc.h:699
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1051
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:583
Definition: vk_mem_alloc.h:770
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1069
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:588
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:679
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1071
VmaMemoryUsage
Definition: vk_mem_alloc.h:627
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:717
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:460
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:455
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:571
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:807
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:447
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:451
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:802
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:584
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:430
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:712
Definition: vk_mem_alloc.h:703
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:820
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:583
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:488
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:851
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:730
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:520
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:590
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:583
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:576
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:449
Definition: vk_mem_alloc.h:697
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:834
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:470
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:580
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:932
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:658
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:479
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:578
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:839
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:635
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:584
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:916
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1067
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:701
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:761
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:927
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
No intended memory usage specified.
Definition: vk_mem_alloc.h:630
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
Definition: vk_mem_alloc.h:642
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:897
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:638
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:646
Definition: vk_mem_alloc.h:462
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:669
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:632
struct VmaStatInfo VmaStatInfo
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:582
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:591
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:845
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:788
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:584
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:902
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.