23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 429 #include <vulkan/vulkan.h> 436 VK_DEFINE_HANDLE(VmaAllocator)
440 VmaAllocator allocator,
442 VkDeviceMemory memory,
446 VmaAllocator allocator,
448 VkDeviceMemory memory,
564 VmaAllocator* pAllocator);
568 VmaAllocator allocator);
575 VmaAllocator allocator,
576 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
583 VmaAllocator allocator,
584 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
593 VmaAllocator allocator,
594 uint32_t memoryTypeIndex,
595 VkMemoryPropertyFlags* pFlags);
606 VmaAllocator allocator,
607 uint32_t frameIndex);
635 VmaAllocator allocator,
638 #define VMA_STATS_STRING_ENABLED 1 640 #if VMA_STATS_STRING_ENABLED 646 VmaAllocator allocator,
647 char** ppStatsString,
648 VkBool32 detailedMap);
651 VmaAllocator allocator,
654 #endif // #if VMA_STATS_STRING_ENABLED 663 VK_DEFINE_HANDLE(VmaPool)
786 VmaAllocator allocator,
787 uint32_t memoryTypeBits,
789 uint32_t* pMemoryTypeIndex);
899 VmaAllocator allocator,
906 VmaAllocator allocator,
916 VmaAllocator allocator,
927 VmaAllocator allocator,
929 size_t* pLostAllocationCount);
931 VK_DEFINE_HANDLE(VmaAllocation)
984 VmaAllocator allocator,
985 const VkMemoryRequirements* pVkMemoryRequirements,
987 VmaAllocation* pAllocation,
997 VmaAllocator allocator,
1000 VmaAllocation* pAllocation,
1005 VmaAllocator allocator,
1008 VmaAllocation* pAllocation,
1013 VmaAllocator allocator,
1014 VmaAllocation allocation);
1018 VmaAllocator allocator,
1019 VmaAllocation allocation,
1024 VmaAllocator allocator,
1025 VmaAllocation allocation,
1039 VmaAllocator allocator,
1040 VmaAllocation* pAllocation);
1051 VmaAllocator allocator,
1052 VmaAllocation allocation,
1056 VmaAllocator allocator,
1057 VmaAllocation allocation);
1185 VmaAllocator allocator,
1186 VmaAllocation* pAllocations,
1187 size_t allocationCount,
1188 VkBool32* pAllocationsChanged,
1218 VmaAllocator allocator,
1219 const VkBufferCreateInfo* pBufferCreateInfo,
1222 VmaAllocation* pAllocation,
1226 VmaAllocator allocator,
1228 VmaAllocation allocation);
1232 VmaAllocator allocator,
1233 const VkImageCreateInfo* pImageCreateInfo,
1236 VmaAllocation* pAllocation,
1240 VmaAllocator allocator,
1242 VmaAllocation allocation);
1246 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1249 #ifdef __INTELLISENSE__ 1250 #define VMA_IMPLEMENTATION 1253 #ifdef VMA_IMPLEMENTATION 1254 #undef VMA_IMPLEMENTATION 1276 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1287 #if VMA_USE_STL_CONTAINERS 1288 #define VMA_USE_STL_VECTOR 1 1289 #define VMA_USE_STL_UNORDERED_MAP 1 1290 #define VMA_USE_STL_LIST 1 1293 #if VMA_USE_STL_VECTOR 1297 #if VMA_USE_STL_UNORDERED_MAP 1298 #include <unordered_map> 1301 #if VMA_USE_STL_LIST 1310 #include <algorithm> 1314 #if !defined(_WIN32) 1321 #define VMA_ASSERT(expr) assert(expr) 1323 #define VMA_ASSERT(expr) 1329 #ifndef VMA_HEAVY_ASSERT 1331 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1333 #define VMA_HEAVY_ASSERT(expr) 1339 #define VMA_NULL nullptr 1342 #ifndef VMA_ALIGN_OF 1343 #define VMA_ALIGN_OF(type) (__alignof(type)) 1346 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1348 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1350 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1354 #ifndef VMA_SYSTEM_FREE 1356 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1358 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1363 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1367 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1371 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1375 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1378 #ifndef VMA_DEBUG_LOG 1379 #define VMA_DEBUG_LOG(format, ...) 1389 #if VMA_STATS_STRING_ENABLED 1390 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1392 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of `num` into `outStr`.
// `strLen` is the whole buffer capacity: snprintf truncates the output and
// always null-terminates when strLen > 0.
static inline void VmaUint64ToStr(
    char* outStr,
    size_t strLen, uint64_t num)
{
    // %llu expects unsigned long long; cast explicitly so the format
    // specifier matches on every platform regardless of uint64_t's identity.
    const unsigned long long printable = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", printable);
}
// Formats `ptr` into `outStr` using the implementation-defined "%p"
// pointer representation. `strLen` is the whole buffer capacity
// (snprintf truncates and null-terminates when strLen > 0).
static inline void VmaPtrToStr(
    char* outStr,
    size_t strLen,
    const void* ptr)
{
    snprintf(outStr, strLen,
        "%p", ptr);
}
1410 void Lock() { m_Mutex.lock(); }
1411 void Unlock() { m_Mutex.unlock(); }
1415 #define VMA_MUTEX VmaMutex 1426 #ifndef VMA_ATOMIC_UINT32 1427 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1430 #ifndef VMA_BEST_FIT 1443 #define VMA_BEST_FIT (1) 1446 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1451 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1454 #ifndef VMA_DEBUG_ALIGNMENT 1459 #define VMA_DEBUG_ALIGNMENT (1) 1462 #ifndef VMA_DEBUG_MARGIN 1467 #define VMA_DEBUG_MARGIN (0) 1470 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1475 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1478 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1483 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1486 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1487 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1491 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1492 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1496 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1497 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1501 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// VkAllocationCallbacks with all six members (pUserData plus the five
// callback function pointers) set to null.
// NOTE(review): presumably substituted where a callbacks struct is needed
// but the user supplied none — confirm at call sites outside this chunk.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count).
static inline uint32_t CountBitsSet(uint32_t v)
{
    // Kernighan's method: v &= v - 1 clears the lowest set bit, so the
    // loop body executes exactly once per set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1u;
        ++count;
    }
    return count;
}
// Rounds `val` up to the nearest multiple of `align`.
// Division-based, so it works for ANY positive alignment,
// not only powers of two. T must be an integer type.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T wholeBlocks = (val + align - 1) / align;
    return wholeBlocks * align;
}
// Integer division x / y with the result rounded to nearest
// (halves round up for non-negative operands). T must be an integer type.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Lomuto-style partition used by VmaQuickSort: rearranges [beg, end) so that
// every element comparing less than the pivot (the LAST element) precedes it,
// then places the pivot after that prefix and returns the pivot's position.
// Precondition: the range is non-empty (the caller guards with beg < end);
// the pivot (*centerValue) is dereferenced unconditionally.
template<
    typename Iterator,
    typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;  // Pivot = last element.
    Iterator insertIndex = beg;                 // Next slot for a "less-than-pivot" element.
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Move the pivot into its final sorted position.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
// Recursive quicksort of [beg, end) using VmaQuickSortPartition.
// Serves as the fallback implementation behind the VMA_SORT macro.
template<
    typename Iterator,
    typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    // Empty and single-element ranges are already sorted; the guard is also
    // required because the partition dereferences the last element.
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

// Returns true when the LAST byte of resource A (placed at resourceAOffset
// with size resourceASize) and the FIRST byte of resource B (placed at
// resourceBOffset) fall on the same memory "page" of size pageSize.
// Assumes pageSize is a power of two: the `& ~(pageSize - 1)` mask rounds
// down to a page boundary only under that assumption.
// Precondition (asserted): A ends at or before B starts, and both
// resourceASize and pageSize are non-zero.
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1; // Last byte of A.
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);  // Page holding A's last byte.
    VkDeviceSize resourceBStart = resourceBOffset;                   // First byte of B.
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1); // Page holding B's first byte.
    return resourceAEndPage == resourceBStartPage;
}
// Kind of resource occupying a suballocation. The numeric ordering is
// significant: VmaIsBufferImageGranularityConflict sorts its two arguments
// by these values before comparing them.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // Range is unused.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // Resource of unknown kind.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // Forces a 32-bit underlying type.
};
// Returns true when suballocations of the two given types may conflict under
// the bufferImageGranularity limit and therefore must not share a page.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    // Order the pair by enum value so only one triangle of the
    // type-by-type matrix needs to be enumerated below.
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        // Unknown resource kind: conservatively report a conflict.
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
1653 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1654 m_pMutex(useMutex ? &mutex : VMA_NULL)
1671 VMA_MUTEX* m_pMutex;
1674 #if VMA_DEBUG_GLOBAL_MUTEX 1675 static VMA_MUTEX gDebugGlobalMutex;
1676 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1678 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1682 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// FIRST element that is NOT less than `key` according to `cmp` — the same
// contract as std::lower_bound. Returns `end` when no such element exists.
template <
    typename IterT,
    typename KeyT,
    typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
    const KeyT &key, CmpT cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while(lo < hi)
    {
        const size_t mid = lo + (hi - lo) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;   // Element is less than key: answer lies to the right.
        }
        else
        {
            hi = mid;       // Candidate found: narrow toward the left.
        }
    }
    return beg + lo;
}
// Allocates `size` bytes with the given `alignment`.
// Prefers the user-supplied Vulkan-style callbacks (pfnAllocation) when they
// exist; otherwise falls back to the system aligned-allocation macro.
static void* VmaMalloc(
    const VkAllocationCallbacks* pAllocationCallbacks,
    size_t size,
    size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
// Frees memory obtained from VmaMalloc, routing through the user-supplied
// pfnFree callback when present; otherwise uses the system free macro.
static void VmaFree(
    const VkAllocationCallbacks* pAllocationCallbacks,
    void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
1745 template<
typename T>
1746 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1748 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1751 template<
typename T>
1752 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1754 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1757 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1759 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1761 template<
typename T>
1762 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1765 VmaFree(pAllocationCallbacks, ptr);
1768 template<
typename T>
1769 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1773 for(
size_t i = count; i--; )
1777 VmaFree(pAllocationCallbacks, ptr);
// Minimal allocator satisfying the STL Allocator requirements that routes
// every allocation through Vulkan-style VkAllocationCallbacks. Used to plug
// the user's callbacks into the containers below (VmaVector/VmaList/VmaMap).
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding copy constructor: allocators for different element types
    // share the same callbacks.
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    // Two allocators are interchangeable iff they use the same callbacks,
    // so memory from one may be released through the other.
    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    // Assignment is meaningless with a const member; explicitly disabled.
    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

// Inserts `item` at position `index` of `vec`.
// Free-function wrapper so generic code can use identical calls for
// std::vector (this branch) and the custom VmaVector (the #else branch).
template<
    typename T,
    typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
    size_t index,
    const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// Removes the element at position `index` from `vec`.
// Counterpart of VmaVectorInsert for the std::vector configuration.
template<
    typename T,
    typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
    size_t index)
{
    vec.erase(vec.begin() + index);
}
1825 #else // #if VMA_USE_STL_VECTOR 1830 template<
typename T,
typename AllocatorT>
1834 typedef T value_type;
1836 VmaVector(
const AllocatorT& allocator) :
1837 m_Allocator(allocator),
1844 VmaVector(
size_t count,
const AllocatorT& allocator) :
1845 m_Allocator(allocator),
1846 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1852 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1853 m_Allocator(src.m_Allocator),
1854 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1855 m_Count(src.m_Count),
1856 m_Capacity(src.m_Count)
1860 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1866 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1869 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1873 resize(rhs.m_Count);
1876 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1882 bool empty()
const {
return m_Count == 0; }
1883 size_t size()
const {
return m_Count; }
1884 T* data() {
return m_pArray; }
1885 const T* data()
const {
return m_pArray; }
1887 T& operator[](
size_t index)
1889 VMA_HEAVY_ASSERT(index < m_Count);
1890 return m_pArray[index];
1892 const T& operator[](
size_t index)
const 1894 VMA_HEAVY_ASSERT(index < m_Count);
1895 return m_pArray[index];
1900 VMA_HEAVY_ASSERT(m_Count > 0);
1903 const T& front()
const 1905 VMA_HEAVY_ASSERT(m_Count > 0);
1910 VMA_HEAVY_ASSERT(m_Count > 0);
1911 return m_pArray[m_Count - 1];
1913 const T& back()
const 1915 VMA_HEAVY_ASSERT(m_Count > 0);
1916 return m_pArray[m_Count - 1];
1919 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1921 newCapacity = VMA_MAX(newCapacity, m_Count);
1923 if((newCapacity < m_Capacity) && !freeMemory)
1925 newCapacity = m_Capacity;
1928 if(newCapacity != m_Capacity)
1930 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1933 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1935 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1936 m_Capacity = newCapacity;
1937 m_pArray = newArray;
1941 void resize(
size_t newCount,
bool freeMemory =
false)
1943 size_t newCapacity = m_Capacity;
1944 if(newCount > m_Capacity)
1946 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1950 newCapacity = newCount;
1953 if(newCapacity != m_Capacity)
1955 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1956 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1957 if(elementsToCopy != 0)
1959 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1961 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1962 m_Capacity = newCapacity;
1963 m_pArray = newArray;
1969 void clear(
bool freeMemory =
false)
1971 resize(0, freeMemory);
1974 void insert(
size_t index,
const T& src)
1976 VMA_HEAVY_ASSERT(index <= m_Count);
1977 const size_t oldCount = size();
1978 resize(oldCount + 1);
1979 if(index < oldCount)
1981 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1983 m_pArray[index] = src;
1986 void remove(
size_t index)
1988 VMA_HEAVY_ASSERT(index < m_Count);
1989 const size_t oldCount = size();
1990 if(index < oldCount - 1)
1992 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1994 resize(oldCount - 1);
1997 void push_back(
const T& src)
1999 const size_t newIndex = size();
2000 resize(newIndex + 1);
2001 m_pArray[newIndex] = src;
2006 VMA_HEAVY_ASSERT(m_Count > 0);
2010 void push_front(
const T& src)
2017 VMA_HEAVY_ASSERT(m_Count > 0);
2021 typedef T* iterator;
2023 iterator begin() {
return m_pArray; }
2024 iterator end() {
return m_pArray + m_Count; }
2027 AllocatorT m_Allocator;
2033 template<
typename T,
typename allocatorT>
2034 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2036 vec.insert(index, item);
2039 template<
typename T,
typename allocatorT>
2040 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2045 #endif // #if VMA_USE_STL_VECTOR 2047 template<
typename CmpLess,
typename VectorT>
2048 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2050 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2052 vector.data() + vector.size(),
2054 CmpLess()) - vector.data();
2055 VmaVectorInsert(vector, indexToInsert, value);
2056 return indexToInsert;
2059 template<
typename CmpLess,
typename VectorT>
2060 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2063 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2068 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2070 size_t indexToRemove = it - vector.begin();
2071 VmaVectorRemove(vector, indexToRemove);
2077 template<
typename CmpLess,
typename VectorT>
2078 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2081 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2083 vector.data() + vector.size(),
2086 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2088 return it - vector.begin();
2092 return vector.size();
2104 template<
typename T>
2105 class VmaPoolAllocator
2108 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2109 ~VmaPoolAllocator();
2117 uint32_t NextFreeIndex;
2124 uint32_t FirstFreeIndex;
2127 const VkAllocationCallbacks* m_pAllocationCallbacks;
2128 size_t m_ItemsPerBlock;
2129 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2131 ItemBlock& CreateNewBlock();
// Constructs an empty pool. Memory comes from `pAllocationCallbacks`; items
// are grouped into blocks of `itemsPerBlock` elements (must be > 0,
// asserted). No block is allocated until the first Alloc() call.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(
    const VkAllocationCallbacks* pAllocationCallbacks,
    size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
// Destroys the pool, releasing all item blocks via Clear().
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}
// Releases every item block (vma_delete_array also runs the Items'
// destructors) and empties the block list.
template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
// Hands out storage for one T. Scans existing blocks (most recently added
// first) for a free slot; when every block is full, creates a new block and
// uses its first item. FirstFreeIndex == UINT32_MAX marks a full block.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has a free item: pop the head of its free list.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new block and use its first item.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
// Returns one item's storage to the pool by pushing it onto the owning
// block's intrusive free list. Asserts if `ptr` was not allocated here.
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Convert T* back to Item* by copying the pointer value via memcpy
        // (avoids aliasing casts; Item presumably overlays NextFreeIndex and
        // Value in a union — confirm with Item's declaration above).
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check whether pItemPtr lies inside this block's item array.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
// Appends a new block of m_ItemsPerBlock items and threads ALL of them onto
// the block's free list: FirstFreeIndex = 0, each item links to the next,
// and the last item is terminated with UINT32_MAX.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    // (Writing through the local `newBlock` is fine: its pItems pointer is
    // the same heap array the pushed copy refers to.)
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
2221 #if VMA_USE_STL_LIST 2223 #define VmaList std::list 2225 #else // #if VMA_USE_STL_LIST 2227 template<
typename T>
2236 template<
typename T>
2240 typedef VmaListItem<T> ItemType;
2242 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2246 size_t GetCount()
const {
return m_Count; }
2247 bool IsEmpty()
const {
return m_Count == 0; }
2249 ItemType* Front() {
return m_pFront; }
2250 const ItemType* Front()
const {
return m_pFront; }
2251 ItemType* Back() {
return m_pBack; }
2252 const ItemType* Back()
const {
return m_pBack; }
2254 ItemType* PushBack();
2255 ItemType* PushFront();
2256 ItemType* PushBack(
const T& value);
2257 ItemType* PushFront(
const T& value);
2262 ItemType* InsertBefore(ItemType* pItem);
2264 ItemType* InsertAfter(ItemType* pItem);
2266 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2267 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2269 void Remove(ItemType* pItem);
2272 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2273 VmaPoolAllocator<ItemType> m_ItemAllocator;
2279 VmaRawList(
const VmaRawList<T>& src);
2280 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2283 template<
typename T>
2284 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2285 m_pAllocationCallbacks(pAllocationCallbacks),
2286 m_ItemAllocator(pAllocationCallbacks, 128),
2293 template<
typename T>
2294 VmaRawList<T>::~VmaRawList()
2300 template<
typename T>
2301 void VmaRawList<T>::Clear()
2303 if(IsEmpty() ==
false)
2305 ItemType* pItem = m_pBack;
2306 while(pItem != VMA_NULL)
2308 ItemType*
const pPrevItem = pItem->pPrev;
2309 m_ItemAllocator.Free(pItem);
2312 m_pFront = VMA_NULL;
2318 template<
typename T>
2319 VmaListItem<T>* VmaRawList<T>::PushBack()
2321 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2322 pNewItem->pNext = VMA_NULL;
2325 pNewItem->pPrev = VMA_NULL;
2326 m_pFront = pNewItem;
2332 pNewItem->pPrev = m_pBack;
2333 m_pBack->pNext = pNewItem;
2340 template<
typename T>
2341 VmaListItem<T>* VmaRawList<T>::PushFront()
2343 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2344 pNewItem->pPrev = VMA_NULL;
2347 pNewItem->pNext = VMA_NULL;
2348 m_pFront = pNewItem;
2354 pNewItem->pNext = m_pFront;
2355 m_pFront->pPrev = pNewItem;
2356 m_pFront = pNewItem;
2362 template<
typename T>
2363 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2365 ItemType*
const pNewItem = PushBack();
2366 pNewItem->Value = value;
2370 template<
typename T>
2371 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2373 ItemType*
const pNewItem = PushFront();
2374 pNewItem->Value = value;
2378 template<
typename T>
2379 void VmaRawList<T>::PopBack()
2381 VMA_HEAVY_ASSERT(m_Count > 0);
2382 ItemType*
const pBackItem = m_pBack;
2383 ItemType*
const pPrevItem = pBackItem->pPrev;
2384 if(pPrevItem != VMA_NULL)
2386 pPrevItem->pNext = VMA_NULL;
2388 m_pBack = pPrevItem;
2389 m_ItemAllocator.Free(pBackItem);
2393 template<
typename T>
2394 void VmaRawList<T>::PopFront()
2396 VMA_HEAVY_ASSERT(m_Count > 0);
2397 ItemType*
const pFrontItem = m_pFront;
2398 ItemType*
const pNextItem = pFrontItem->pNext;
2399 if(pNextItem != VMA_NULL)
2401 pNextItem->pPrev = VMA_NULL;
2403 m_pFront = pNextItem;
2404 m_ItemAllocator.Free(pFrontItem);
2408 template<
typename T>
2409 void VmaRawList<T>::Remove(ItemType* pItem)
2411 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2412 VMA_HEAVY_ASSERT(m_Count > 0);
2414 if(pItem->pPrev != VMA_NULL)
2416 pItem->pPrev->pNext = pItem->pNext;
2420 VMA_HEAVY_ASSERT(m_pFront == pItem);
2421 m_pFront = pItem->pNext;
2424 if(pItem->pNext != VMA_NULL)
2426 pItem->pNext->pPrev = pItem->pPrev;
2430 VMA_HEAVY_ASSERT(m_pBack == pItem);
2431 m_pBack = pItem->pPrev;
2434 m_ItemAllocator.Free(pItem);
2438 template<
typename T>
2439 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2441 if(pItem != VMA_NULL)
2443 ItemType*
const prevItem = pItem->pPrev;
2444 ItemType*
const newItem = m_ItemAllocator.Alloc();
2445 newItem->pPrev = prevItem;
2446 newItem->pNext = pItem;
2447 pItem->pPrev = newItem;
2448 if(prevItem != VMA_NULL)
2450 prevItem->pNext = newItem;
2454 VMA_HEAVY_ASSERT(m_pFront == pItem);
2464 template<
typename T>
2465 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2467 if(pItem != VMA_NULL)
2469 ItemType*
const nextItem = pItem->pNext;
2470 ItemType*
const newItem = m_ItemAllocator.Alloc();
2471 newItem->pNext = nextItem;
2472 newItem->pPrev = pItem;
2473 pItem->pNext = newItem;
2474 if(nextItem != VMA_NULL)
2476 nextItem->pPrev = newItem;
2480 VMA_HEAVY_ASSERT(m_pBack == pItem);
2490 template<
typename T>
2491 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2493 ItemType*
const newItem = InsertBefore(pItem);
2494 newItem->Value = value;
2498 template<
typename T>
2499 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2501 ItemType*
const newItem = InsertAfter(pItem);
2502 newItem->Value = value;
2506 template<
typename T,
typename AllocatorT>
2519 T& operator*()
const 2521 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2522 return m_pItem->Value;
2524 T* operator->()
const 2526 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2527 return &m_pItem->Value;
2530 iterator& operator++()
2532 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2533 m_pItem = m_pItem->pNext;
2536 iterator& operator--()
2538 if(m_pItem != VMA_NULL)
2540 m_pItem = m_pItem->pPrev;
2544 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2545 m_pItem = m_pList->Back();
2550 iterator operator++(
int)
2552 iterator result = *
this;
2556 iterator operator--(
int)
2558 iterator result = *
this;
2563 bool operator==(
const iterator& rhs)
const 2565 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2566 return m_pItem == rhs.m_pItem;
2568 bool operator!=(
const iterator& rhs)
const 2570 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2571 return m_pItem != rhs.m_pItem;
2575 VmaRawList<T>* m_pList;
2576 VmaListItem<T>* m_pItem;
2578 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2584 friend class VmaList<T, AllocatorT>;
2587 class const_iterator
2596 const_iterator(
const iterator& src) :
2597 m_pList(src.m_pList),
2598 m_pItem(src.m_pItem)
2602 const T& operator*()
const 2604 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2605 return m_pItem->Value;
2607 const T* operator->()
const 2609 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2610 return &m_pItem->Value;
2613 const_iterator& operator++()
2615 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2616 m_pItem = m_pItem->pNext;
2619 const_iterator& operator--()
2621 if(m_pItem != VMA_NULL)
2623 m_pItem = m_pItem->pPrev;
2627 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2628 m_pItem = m_pList->Back();
2633 const_iterator operator++(
int)
2635 const_iterator result = *
this;
2639 const_iterator operator--(
int)
2641 const_iterator result = *
this;
2646 bool operator==(
const const_iterator& rhs)
const 2648 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2649 return m_pItem == rhs.m_pItem;
2651 bool operator!=(
const const_iterator& rhs)
const 2653 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2654 return m_pItem != rhs.m_pItem;
2658 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2664 const VmaRawList<T>* m_pList;
2665 const VmaListItem<T>* m_pItem;
2667 friend class VmaList<T, AllocatorT>;
2670 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2672 bool empty()
const {
return m_RawList.IsEmpty(); }
2673 size_t size()
const {
return m_RawList.GetCount(); }
2675 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2676 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2678 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2679 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2681 void clear() { m_RawList.Clear(); }
2682 void push_back(
const T& value) { m_RawList.PushBack(value); }
2683 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2684 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2687 VmaRawList<T> m_RawList;
2690 #endif // #if VMA_USE_STL_LIST 2698 #if VMA_USE_STL_UNORDERED_MAP 2700 #define VmaPair std::pair 2702 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2703 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2705 #else // #if VMA_USE_STL_UNORDERED_MAP 2707 template<
typename T1,
typename T2>
2713 VmaPair() : first(), second() { }
2714 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2720 template<
typename KeyT,
typename ValueT>
2724 typedef VmaPair<KeyT, ValueT> PairType;
2725 typedef PairType* iterator;
2727 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2729 iterator begin() {
return m_Vector.begin(); }
2730 iterator end() {
return m_Vector.end(); }
2732 void insert(
const PairType& pair);
2733 iterator find(
const KeyT& key);
2734 void erase(iterator it);
2737 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2740 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2742 template<
typename FirstT,
typename SecondT>
2743 struct VmaPairFirstLess
2745 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2747 return lhs.first < rhs.first;
2749 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2751 return lhs.first < rhsFirst;
2755 template<
typename KeyT,
typename ValueT>
2756 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2758 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2760 m_Vector.data() + m_Vector.size(),
2762 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2763 VmaVectorInsert(m_Vector, indexToInsert, pair);
2766 template<
typename KeyT,
typename ValueT>
2767 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2769 PairType* it = VmaBinaryFindFirstNotLess(
2771 m_Vector.data() + m_Vector.size(),
2773 VmaPairFirstLess<KeyT, ValueT>());
2774 if((it != m_Vector.end()) && (it->first == key))
2780 return m_Vector.end();
2784 template<
typename KeyT,
typename ValueT>
2785 void VmaMap<KeyT, ValueT>::erase(iterator it)
2787 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2790 #endif // #if VMA_USE_STL_UNORDERED_MAP 2796 class VmaDeviceMemoryBlock;
2798 enum VMA_BLOCK_VECTOR_TYPE
2800 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2801 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2802 VMA_BLOCK_VECTOR_TYPE_COUNT
2808 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2809 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2812 struct VmaAllocation_T
2815 enum ALLOCATION_TYPE
2817 ALLOCATION_TYPE_NONE,
// ---- Interior of struct VmaAllocation_T: per-allocation bookkeeping ----
// NOTE(review): this chunk is extraction-damaged. The original-source line
// numbers fused into each line (e.g. "2818") show gaps, so statements and
// braces are missing throughout. Code is preserved byte-for-byte below;
// only comments were added.
// Tail of enum ALLOCATION_TYPE: BLOCK = suballocation inside a shared
// VmaDeviceMemoryBlock; OWN = dedicated VkDeviceMemory owned by this object.
2818 ALLOCATION_TYPE_BLOCK,
2819 ALLOCATION_TYPE_OWN,
// Ctor: starts as TYPE_NONE; records the creation frame for lost-allocation
// tracking (m_LastUseFrameIndex is atomic — see VMA_ATOMIC_UINT32 member).
2822 VmaAllocation_T(uint32_t currentFrameIndex) :
2825 m_pUserData(VMA_NULL),
2826 m_Type(ALLOCATION_TYPE_NONE),
2827 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2828 m_LastUseFrameIndex(currentFrameIndex)
// Switches this object to the BLOCK state, binding it to `block` at `offset`.
// May only be called once, from the NONE state (asserted below).
2832 void InitBlockAllocation(
2834 VmaDeviceMemoryBlock* block,
2835 VkDeviceSize offset,
2836 VkDeviceSize alignment,
2838 VmaSuballocationType suballocationType,
2842 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2843 VMA_ASSERT(block != VMA_NULL);
2844 m_Type = ALLOCATION_TYPE_BLOCK;
2845 m_Alignment = alignment;
2847 m_pUserData = pUserData;
2848 m_SuballocationType = suballocationType;
2849 m_BlockAllocation.m_hPool = hPool;
2850 m_BlockAllocation.m_Block = block;
2851 m_BlockAllocation.m_Offset = offset;
2852 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Init as an already-lost BLOCK allocation: null pool/block, offset 0.
// Requires m_LastUseFrameIndex to already equal VMA_FRAME_INDEX_LOST.
// NOTE(review): the function header (original line ~2854, InitLost) is
// missing from this extract.
2857 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2858 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2859 m_Type = ALLOCATION_TYPE_BLOCK;
2860 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2861 m_BlockAllocation.m_Block = VMA_NULL;
2862 m_BlockAllocation.m_Offset = 0;
2863 m_BlockAllocation.m_CanBecomeLost =
true;
// Re-points an existing BLOCK allocation at a new block/offset — used by the
// defragmentator after moving the allocation's bytes.
2866 void ChangeBlockAllocation(
2867 VmaDeviceMemoryBlock* block,
2868 VkDeviceSize offset)
2870 VMA_ASSERT(block != VMA_NULL);
2871 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2872 m_BlockAllocation.m_Block = block;
2873 m_BlockAllocation.m_Offset = offset;
// Switches this object to the OWN state: a dedicated VkDeviceMemory,
// optionally persistently mapped at pMappedData.
2876 void InitOwnAllocation(
2877 uint32_t memoryTypeIndex,
2878 VkDeviceMemory hMemory,
2879 VmaSuballocationType suballocationType,
2885 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2886 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2887 m_Type = ALLOCATION_TYPE_OWN;
2890 m_pUserData = pUserData;
2891 m_SuballocationType = suballocationType;
2892 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2893 m_OwnAllocation.m_hMemory = hMemory;
2894 m_OwnAllocation.m_PersistentMap = persistentMap;
2895 m_OwnAllocation.m_pMappedData = pMappedData;
// ---- Trivial inline accessors ----
2898 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2899 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2900 VkDeviceSize GetSize()
const {
return m_Size; }
2901 void* GetUserData()
const {
return m_pUserData; }
2902 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2903 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
// Valid only in the BLOCK state (asserted).
2905 VmaDeviceMemoryBlock* GetBlock()
const 2907 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2908 return m_BlockAllocation.m_Block;
// Out-of-line accessors that dispatch on m_Type — defined later in the file.
2910 VkDeviceSize GetOffset()
const;
2911 VkDeviceMemory GetMemory()
const;
2912 uint32_t GetMemoryTypeIndex()
const;
2913 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2914 void* GetMappedData()
const;
2915 bool CanBecomeLost()
const;
2916 VmaPool GetPool()
const;
// (Un)map a persistently-mapped OWN allocation; used when the whole
// allocator toggles persistent mapping.
2918 VkResult OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator);
2919 void OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator);
// Atomic frame-index access: CAS is used to race-safely mark as lost/touched.
2921 uint32_t GetLastUseFrameIndex()
const 2923 return m_LastUseFrameIndex.load();
2925 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2927 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
// Tries to mark this allocation lost; returns true on success. Defined later.
2937 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
2941 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
// ---- Data members ----
2953 VkDeviceSize m_Alignment;
2954 VkDeviceSize m_Size;
2956 ALLOCATION_TYPE m_Type;
2957 VmaSuballocationType m_SuballocationType;
2958 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State used when m_Type == ALLOCATION_TYPE_BLOCK.
2961 struct BlockAllocation
2964 VmaDeviceMemoryBlock* m_Block;
2965 VkDeviceSize m_Offset;
2966 bool m_CanBecomeLost;
// State used when m_Type == ALLOCATION_TYPE_OWN (dedicated memory).
2970 struct OwnAllocation
2972 uint32_t m_MemoryTypeIndex;
2973 VkDeviceMemory m_hMemory;
2974 bool m_PersistentMap;
2975 void* m_pMappedData;
// NOTE(review): in the upstream file these two members live in an anonymous
// union selected by m_Type — the union keyword is lost in this extract.
2981 BlockAllocation m_BlockAllocation;
2983 OwnAllocation m_OwnAllocation;
// One region inside a VmaDeviceMemoryBlock. type == VMA_SUBALLOCATION_TYPE_FREE
// means an unused gap (hAllocation is then VK_NULL_HANDLE — see Validate()).
2991 struct VmaSuballocation
2993 VkDeviceSize offset;
2995 VmaAllocation hAllocation;
2996 VmaSuballocationType type;
// Doubly-linked list of suballocations, ordered by offset.
2999 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost penalty (in bytes) charged per allocation that must be made lost when
// comparing candidate allocation spots — see VmaAllocationRequest::CalcCost().
3002 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of searching a block for a place to allocate: target offset, the
// suballocation to carve, and how many existing allocations must be made
// lost to use this spot.
3017 struct VmaAllocationRequest
3019 VkDeviceSize offset;
3020 VkDeviceSize sumFreeSize;
3021 VkDeviceSize sumItemSize;
3022 VmaSuballocationList::iterator item;
3023 size_t itemsToMakeLostCount;
// Cost metric for choosing between requests: bytes of live allocations
// sacrificed plus a fixed penalty per lost allocation.
3025 VkDeviceSize CalcCost()
const 3027 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// One VkDeviceMemory chunk subdivided into suballocations. Keeps an
// offset-ordered list (m_Suballocations) plus a by-size index of free
// ranges (m_FreeSuballocationsBySize) for best-fit search.
3037 class VmaDeviceMemoryBlock
3040 uint32_t m_MemoryTypeIndex;
3041 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3042 VkDeviceMemory m_hMemory;
3043 VkDeviceSize m_Size;
3044 bool m_PersistentMap;
3045 void* m_pMappedData;
3046 uint32_t m_FreeCount;
3047 VkDeviceSize m_SumFreeSize;
3048 VmaSuballocationList m_Suballocations;
// Iterators into m_Suballocations for FREE ranges, sorted ascending by size.
3051 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3053 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destroy() must have been called first (it nulls m_hMemory).
3055 ~VmaDeviceMemoryBlock()
3057 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init(...): takes ownership of newMemory and resets state to one big free
// range. NOTE(review): the "void Init(" header line is missing here.
3062 uint32_t newMemoryTypeIndex,
3063 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
3064 VkDeviceMemory newMemory,
3065 VkDeviceSize newSize,
// Unmaps (if mapped) and frees the VkDeviceMemory back through the allocator.
3069 void Destroy(VmaAllocator allocator);
// Full consistency check of the suballocation structures; debug-only use.
3072 bool Validate()
const;
// Searches for space for an allocation; may propose making others lost when
// canMakeOtherLost is set. Returns false if the block cannot satisfy it.
3077 bool CreateAllocationRequest(
3078 uint32_t currentFrameIndex,
3079 uint32_t frameInUseCount,
3080 VkDeviceSize bufferImageGranularity,
3081 VkDeviceSize allocSize,
3082 VkDeviceSize allocAlignment,
3083 VmaSuballocationType allocType,
3084 bool canMakeOtherLost,
3085 VmaAllocationRequest* pAllocationRequest);
3087 bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
3089 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3092 bool IsEmpty()
const;
// Alloc(...): commits a previously computed request. NOTE(review): the
// "void Alloc(" header line (~3096) is missing from this extract.
3097 const VmaAllocationRequest& request,
3098 VmaSuballocationType type,
3099 VkDeviceSize allocSize,
3100 VmaAllocation hAllocation);
// Frees the suballocation that carries `allocation`.
3103 void Free(
const VmaAllocation allocation);
3105 #if VMA_STATS_STRING_ENABLED 3106 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Core fit test for one candidate suballocation; fills offset/cost outputs.
3112 bool CheckAllocation(
3113 uint32_t currentFrameIndex,
3114 uint32_t frameInUseCount,
3115 VkDeviceSize bufferImageGranularity,
3116 VkDeviceSize allocSize,
3117 VkDeviceSize allocAlignment,
3118 VmaSuballocationType allocType,
3119 VmaSuballocationList::const_iterator suballocItem,
3120 bool canMakeOtherLost,
3121 VkDeviceSize* pOffset,
3122 size_t* itemsToMakeLostCount,
3123 VkDeviceSize* pSumFreeSize,
3124 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
3127 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3131 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3134 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3137 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3139 bool ValidateFreeSuballocationList()
const;
// Comparator ordering raw pointers (for sorted pointer containers).
// NOTE(review): the operator body (`return lhs < rhs;`) is missing from this
// extract; the trailing fused text is a forward declaration of
// VmaDefragmentator from original line 3150.
3142 struct VmaPointerLess
3144 bool operator()(
const void* lhs,
const void* rhs)
const 3150 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock for one memory type + block-vector type
// (mapped/unmapped). Serves both default pools and custom VmaPool objects
// (m_IsCustomPool).
3158 struct VmaBlockVector
// Ctor parameter list (header line missing in this extract).
3161 VmaAllocator hAllocator,
3162 uint32_t memoryTypeIndex,
3163 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3164 VkDeviceSize preferredBlockSize,
3165 size_t minBlockCount,
3166 size_t maxBlockCount,
3167 VkDeviceSize bufferImageGranularity,
3168 uint32_t frameInUseCount,
// Pre-creates m_MinBlockCount empty blocks (used by custom pools).
3172 VkResult CreateMinBlocks();
// ---- Trivial accessors ----
3174 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3175 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3176 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3177 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3178 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3182 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate(...) parameter tail (header line missing in this extract).
3185 VmaPool hCurrentPool,
3186 uint32_t currentFrameIndex,
3187 const VkMemoryRequirements& vkMemReq,
3189 VmaSuballocationType suballocType,
3190 VmaAllocation* pAllocation);
// Free(...) parameter tail (header line missing in this extract).
3193 VmaAllocation hAllocation);
3198 #if VMA_STATS_STRING_ENABLED 3199 void PrintDetailedMap(
class VmaJsonWriter& json);
// Toggle persistent mapping for all blocks in this vector.
3202 void UnmapPersistentlyMappedMemory();
3203 VkResult MapPersistentlyMappedMemory();
3205 void MakePoolAllocationsLost(
3206 uint32_t currentFrameIndex,
3207 size_t* pLostAllocationCount);
// Lazily creates the defragmentator bound to this vector.
3209 VmaDefragmentator* EnsureDefragmentator(
3210 VmaAllocator hAllocator,
3211 uint32_t currentFrameIndex);
3213 VkResult Defragment(
3215 VkDeviceSize& maxBytesToMove,
3216 uint32_t& maxAllocationsToMove);
3218 void DestroyDefragmentator();
// Defragmentator pokes at m_Blocks and private helpers directly.
3221 friend class VmaDefragmentator;
// ---- Immutable configuration ----
3223 const VmaAllocator m_hAllocator;
3224 const uint32_t m_MemoryTypeIndex;
3225 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3226 const VkDeviceSize m_PreferredBlockSize;
3227 const size_t m_MinBlockCount;
3228 const size_t m_MaxBlockCount;
3229 const VkDeviceSize m_BufferImageGranularity;
3230 const uint32_t m_FrameInUseCount;
3231 const bool m_IsCustomPool;
// ---- Mutable state ----
3234 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while one fully-empty block is being kept around for reuse.
3238 bool m_HasEmptyBlock;
3239 VmaDefragmentator* m_pDefragmentator;
3242 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks roughly sorted (by descending free space, per upstream docs
// — TODO confirm) with O(1) work per call.
3246 void IncrementallySortBlocks();
3248 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Fragment of struct VmaPool_T (custom pool): it wraps a single
// VmaBlockVector. NOTE(review): the class header and ctor body are missing
// from this extract; only the member, ctor parameter, and accessor survive.
3254 VmaBlockVector m_BlockVector;
3258 VmaAllocator hAllocator,
3262 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact memory,
// bounded by maxBytesToMove / maxAllocationsToMove.
3264 #if VMA_STATS_STRING_ENABLED 3269 class VmaDefragmentator
3271 const VmaAllocator m_hAllocator;
3272 VmaBlockVector*
const m_pBlockVector;
3273 uint32_t m_CurrentFrameIndex;
3274 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
// Running totals reported via GetBytesMoved()/GetAllocationsMoved().
3275 VkDeviceSize m_BytesMoved;
3276 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set if it
// was actually moved.
3278 struct AllocationInfo
3280 VmaAllocation m_hAllocation;
3281 VkBool32* m_pChanged;
3284 m_hAllocation(VK_NULL_HANDLE),
3285 m_pChanged(VMA_NULL)
// Orders AllocationInfo by descending allocation size (move big ones first).
3290 struct AllocationInfoSizeGreater
3292 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3294 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3299 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state: which registered allocations live in it and
// whether it also holds allocations we are not allowed to move.
3303 VmaDeviceMemoryBlock* m_pBlock;
3304 bool m_HasNonMovableAllocations;
3305 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3307 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3309 m_HasNonMovableAllocations(true),
3310 m_Allocations(pAllocationCallbacks),
3311 m_pMappedDataForDefragmentation(VMA_NULL)
// Non-movable iff the block holds more live allocations than we registered.
3315 void CalcHasNonMovableAllocations()
3317 const size_t blockAllocCount =
3318 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3319 const size_t defragmentAllocCount = m_Allocations.size();
3320 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// NOTE(review): "Descecnding" typo is upstream; name kept verbatim here.
3323 void SortAllocationsBySizeDescecnding()
3325 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Maps the block for CPU-side memcpy during defragmentation (lazy).
3328 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3329 void Unmap(VmaAllocator hAllocator);
3333 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators so a sorted BlockInfo* vector can be searched by
// raw VmaDeviceMemoryBlock* as well.
3336 struct BlockPointerLess
3338 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3340 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3342 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3344 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Orders candidate destination blocks: fully-movable blocks first, then by
// free space (the return statements are lost in this extract).
3350 struct BlockInfoCompareMoveDestination
3352 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3354 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3358 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3362 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3370 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3371 BlockInfoVector m_Blocks;
// One pass of the move loop, honoring the budget arguments.
3373 VkResult DefragmentRound(
3374 VkDeviceSize maxBytesToMove,
3375 uint32_t maxAllocationsToMove);
// Heuristic: only move to an earlier block, or earlier offset in the same one.
3377 static bool MoveMakesSense(
3378 size_t dstBlockIndex, VkDeviceSize dstOffset,
3379 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Public ctor parameter tail (header line missing in this extract).
3383 VmaAllocator hAllocator,
3384 VmaBlockVector* pBlockVector,
3385 uint32_t currentFrameIndex);
3387 ~VmaDefragmentator();
3389 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3390 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
// Registers one allocation to be considered for moving.
3392 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3394 VkResult Defragment(
3395 VkDeviceSize maxBytesToMove,
3396 uint32_t maxAllocationsToMove);
// The allocator object behind the VmaAllocator handle: device properties,
// per-memory-type block vectors, own (dedicated) allocations, pools.
3400 struct VmaAllocator_T
3404 bool m_AllocationCallbacksSpecified;
3405 VkAllocationCallbacks m_AllocationCallbacks;
// >0 while persistently-mapped memory is globally unmapped (nesting counter).
3409 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
// Optional per-heap size caps, guarded by its own mutex.
3412 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3413 VMA_MUTEX m_HeapSizeLimitMutex;
3415 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3416 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors: [memory type][mapped/unmapped].
3419 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
// Dedicated ("own") allocations, same indexing, one mutex per memory type.
3422 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3423 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3424 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied at creation, else null (use defaults).
3429 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3431 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3435 return m_VulkanFunctions;
// Effective granularity: max of device limit and debug minimum (the VMA_MAX
// call's opening is lost in this extract).
3438 VkDeviceSize GetBufferImageGranularity()
const 3441 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3442 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3445 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3446 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3448 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3450 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3451 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Main allocation entry point (behind vmaAllocateMemory*).
3455 VkResult AllocateMemory(
3456 const VkMemoryRequirements& vkMemReq,
3458 VmaSuballocationType suballocType,
3459 VmaAllocation* pAllocation);
3462 void FreeMemory(
const VmaAllocation allocation);
3464 void CalculateStats(
VmaStats* pStats);
3466 #if VMA_STATS_STRING_ENABLED 3467 void PrintDetailedMap(
class VmaJsonWriter& json);
3470 void UnmapPersistentlyMappedMemory();
3471 VkResult MapPersistentlyMappedMemory();
3473 VkResult Defragment(
3474 VmaAllocation* pAllocations,
3475 size_t allocationCount,
3476 VkBool32* pAllocationsChanged,
3480 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3483 void DestroyPool(VmaPool pool);
3484 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
// Bumps m_CurrentFrameIndex; drives lost-allocation aging.
3486 void SetCurrentFrameIndex(uint32_t frameIndex);
3488 void MakePoolAllocationsLost(
3490 size_t* pLostAllocationCount);
3492 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also enforce
// m_HeapSizeLimit accounting.
3494 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3495 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3498 VkDeviceSize m_PreferredLargeHeapBlockSize;
3499 VkDeviceSize m_PreferredSmallHeapBlockSize;
3501 VkPhysicalDevice m_PhysicalDevice;
3502 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
3504 VMA_MUTEX m_PoolsMutex;
3506 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3512 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation within one already-chosen memory type.
3514 VkResult AllocateMemoryOfType(
3515 const VkMemoryRequirements& vkMemReq,
3517 uint32_t memTypeIndex,
3518 VmaSuballocationType suballocType,
3519 VmaAllocation* pAllocation);
// Dedicated-memory path (ALLOCATION_TYPE_OWN).
3522 VkResult AllocateOwnMemory(
3524 VmaSuballocationType suballocType,
3525 uint32_t memTypeIndex,
3528 VmaAllocation* pAllocation);
3531 void FreeOwnMemory(VmaAllocation allocation);
// Host-memory helpers routing through the allocator's VkAllocationCallbacks.
3537 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3539 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3542 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3544 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (raw memory only; upstream pairs this with
// placement-new at call sites — TODO confirm, the call sites are not visible).
3547 template<
typename T>
3548 static T* VmaAllocate(VmaAllocator hAllocator)
3550 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3553 template<
typename T>
3554 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3556 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// NOTE(review): the explicit destructor calls (ptr->~T()) from the upstream
// bodies are missing in this extract; only the frees survive.
3559 template<
typename T>
3560 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3565 VmaFree(hAllocator, ptr);
3569 template<
typename T>
3570 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3574 for(
size_t i = count; i--; )
3576 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer backed by VmaVector<char>; used to build
// the JSON stats string. Not null-terminated (see GetLength/GetData).
3583 #if VMA_STATS_STRING_ENABLED 3585 class VmaStringBuilder
3588 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3589 size_t GetLength()
const {
return m_Data.size(); }
3590 const char* GetData()
const {
return m_Data.data(); }
3592 void Add(
char ch) { m_Data.push_back(ch); }
3593 void Add(
const char* pStr);
3594 void AddNewLine() { Add(
'\n'); }
3595 void AddNumber(uint32_t num);
3596 void AddNumber(uint64_t num);
3597 void AddPointer(
const void* ptr);
3600 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by bulk resize + memcpy.
3603 void VmaStringBuilder::Add(
const char* pStr)
3605 const size_t strLen = strlen(pStr);
3608 const size_t oldCount = m_Data.size();
3609 m_Data.resize(oldCount + strLen);
3610 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer formatting into a stack buffer via Vma*ToStr helpers.
// NOTE(review): buffer declarations and the final Add(buf) calls are missing
// from this extract.
3614 void VmaStringBuilder::AddNumber(uint32_t num)
3617 VmaUint32ToStr(buf,
sizeof(buf), num);
3621 void VmaStringBuilder::AddNumber(uint64_t num)
3624 VmaUint64ToStr(buf,
sizeof(buf), num);
3628 void VmaStringBuilder::AddPointer(
const void* ptr)
3631 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over VmaStringBuilder. Objects are written as
// alternating name/value strings; a stack of StackItem tracks nesting.
// NOTE(review): the "class VmaJsonWriter" header line is missing here; this
// starts at its constructor declaration.
3635 #endif // #if VMA_STATS_STRING_ENABLED 3640 #if VMA_STATS_STRING_ENABLED 3645 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3648 void BeginObject(
bool singleLine =
false);
3651 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
3654 void WriteString(
const char* pStr);
3655 void BeginString(
const char* pStr = VMA_NULL);
3656 void ContinueString(
const char* pStr);
3657 void ContinueString(uint32_t n);
3658 void ContinueString(uint64_t n);
3659 void EndString(
const char* pStr = VMA_NULL);
3661 void WriteNumber(uint32_t n);
3662 void WriteNumber(uint64_t n);
3663 void WriteBool(
bool b);
// Per-level indentation unit (defined out-of-line below).
3667 static const char*
const INDENT;
3669 enum COLLECTION_TYPE
3671 COLLECTION_TYPE_OBJECT,
3672 COLLECTION_TYPE_ARRAY,
// One open collection: kind, values emitted so far (objects count name and
// value separately), and whether it renders on a single line.
3676 COLLECTION_TYPE type;
3677 uint32_t valueCount;
3678 bool singleLineMode;
3681 VmaStringBuilder& m_SB;
3682 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3683 bool m_InsideString;
// Emits separators/indentation before a value; isString gates the
// name-position assertion inside objects.
3685 void BeginValue(
bool isString);
3686 void WriteIndent(
bool oneLess =
false);
// Two-space indentation per nesting level.
3689 const char*
const VmaJsonWriter::INDENT =
" ";
// ---- VmaJsonWriter out-of-line definitions ----
// NOTE(review): many statements are missing throughout (the fused original
// line numbers skip); comments describe only what the surviving lines show.
3691 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3693 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3694 m_InsideString(false)
// Dtor asserts balanced usage: no open string, no open collections.
3698 VmaJsonWriter::~VmaJsonWriter()
3700 VMA_ASSERT(!m_InsideString);
3701 VMA_ASSERT(m_Stack.empty());
// Push an object scope ('{' emission is among the missing lines).
3704 void VmaJsonWriter::BeginObject(
bool singleLine)
3706 VMA_ASSERT(!m_InsideString);
3712 item.type = COLLECTION_TYPE_OBJECT;
3713 item.valueCount = 0;
3714 item.singleLineMode = singleLine;
3715 m_Stack.push_back(item);
// Pop an object scope; must match a BeginObject.
3718 void VmaJsonWriter::EndObject()
3720 VMA_ASSERT(!m_InsideString);
3725 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Push an array scope.
3729 void VmaJsonWriter::BeginArray(
bool singleLine)
3731 VMA_ASSERT(!m_InsideString);
3737 item.type = COLLECTION_TYPE_ARRAY;
3738 item.valueCount = 0;
3739 item.singleLineMode = singleLine;
3740 m_Stack.push_back(item);
3743 void VmaJsonWriter::EndArray()
3745 VMA_ASSERT(!m_InsideString);
3750 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: complete quoted string in one call.
3754 void VmaJsonWriter::WriteString(
const char* pStr)
// Opens a quoted string; optional initial content.
3760 void VmaJsonWriter::BeginString(
const char* pStr)
3762 VMA_ASSERT(!m_InsideString);
3766 m_InsideString =
true;
3767 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3769 ContinueString(pStr);
// Appends raw characters inside an open string, escaping per character
// (the escape switch is among the missing lines; only the default-case
// assert survives).
3773 void VmaJsonWriter::ContinueString(
const char* pStr)
3775 VMA_ASSERT(m_InsideString);
3777 const size_t strLen = strlen(pStr);
3778 for(
size_t i = 0; i < strLen; ++i)
3805 VMA_ASSERT(0 &&
"Character not currently supported.");
3811 void VmaJsonWriter::ContinueString(uint32_t n)
3813 VMA_ASSERT(m_InsideString);
3817 void VmaJsonWriter::ContinueString(uint64_t n)
3819 VMA_ASSERT(m_InsideString);
// Closes the quoted string; optional final content first.
3823 void VmaJsonWriter::EndString(
const char* pStr)
3825 VMA_ASSERT(m_InsideString);
3826 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3828 ContinueString(pStr);
3831 m_InsideString =
false;
// Unquoted numeric values (only valid outside a string).
3834 void VmaJsonWriter::WriteNumber(uint32_t n)
3836 VMA_ASSERT(!m_InsideString);
3841 void VmaJsonWriter::WriteNumber(uint64_t n)
3843 VMA_ASSERT(!m_InsideString);
3848 void VmaJsonWriter::WriteBool(
bool b)
3850 VMA_ASSERT(!m_InsideString);
3852 m_SB.Add(b ?
"true" :
"false");
3855 void VmaJsonWriter::WriteNull()
3857 VMA_ASSERT(!m_InsideString);
// Separator/indent bookkeeping before each value. In objects, even
// valueCount positions are names (must be strings); odd positions follow a
// name, so no comma is emitted there.
3862 void VmaJsonWriter::BeginValue(
bool isString)
3864 if(!m_Stack.empty())
3866 StackItem& currItem = m_Stack.back();
3867 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3868 currItem.valueCount % 2 == 0)
3870 VMA_ASSERT(isString);
3873 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3874 currItem.valueCount % 2 != 0)
3878 else if(currItem.valueCount > 0)
3887 ++currItem.valueCount;
// Newline + one INDENT per stack level (oneLess for closing brackets);
// suppressed in single-line mode.
3891 void VmaJsonWriter::WriteIndent(
bool oneLess)
3893 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3897 size_t count = m_Stack.size();
3898 if(count > 0 && oneLess)
3902 for(
size_t i = 0; i < count; ++i)
// ---- VmaAllocation_T out-of-line accessors: dispatch on m_Type ----
// (The switch(m_Type) headers and default branches are among the lines
// missing from this extract.)
// Offset within the owning block; OWN allocations return 0 upstream — the
// return is missing here.
3909 #endif // #if VMA_STATS_STRING_ENABLED 3913 VkDeviceSize VmaAllocation_T::GetOffset()
const 3917 case ALLOCATION_TYPE_BLOCK:
3918 return m_BlockAllocation.m_Offset;
3919 case ALLOCATION_TYPE_OWN:
3927 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3931 case ALLOCATION_TYPE_BLOCK:
3932 return m_BlockAllocation.m_Block->m_hMemory;
3933 case ALLOCATION_TYPE_OWN:
3934 return m_OwnAllocation.m_hMemory;
3937 return VK_NULL_HANDLE;
3941 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3945 case ALLOCATION_TYPE_BLOCK:
3946 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3947 case ALLOCATION_TYPE_OWN:
3948 return m_OwnAllocation.m_MemoryTypeIndex;
// For OWN allocations, vector type is derived from the persistent-map flag.
3955 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3959 case ALLOCATION_TYPE_BLOCK:
3960 return m_BlockAllocation.m_Block->m_BlockVectorType;
3961 case ALLOCATION_TYPE_OWN:
3962 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3965 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// Mapped pointer adjusted by the suballocation offset (block case), or the
// own allocation's mapping; null when the block is not mapped.
3969 void* VmaAllocation_T::GetMappedData()
const 3973 case ALLOCATION_TYPE_BLOCK:
3974 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3976 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
3983 case ALLOCATION_TYPE_OWN:
3984 return m_OwnAllocation.m_pMappedData;
// Only block allocations can be lost (OWN returns false upstream; that
// return is missing here).
3991 bool VmaAllocation_T::CanBecomeLost()
const 3995 case ALLOCATION_TYPE_BLOCK:
3996 return m_BlockAllocation.m_CanBecomeLost;
3997 case ALLOCATION_TYPE_OWN:
// Pool handle is only meaningful for block allocations.
4005 VmaPool VmaAllocation_T::GetPool()
const 4007 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4008 return m_BlockAllocation.m_hPool;
// Maps/unmaps a persistently-mapped OWN allocation via the allocator's
// function pointers.
4011 VkResult VmaAllocation_T::OwnAllocMapPersistentlyMappedMemory(VmaAllocator hAllocator)
4013 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4014 if(m_OwnAllocation.m_PersistentMap)
4016 return (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4017 hAllocator->m_hDevice,
4018 m_OwnAllocation.m_hMemory,
4022 &m_OwnAllocation.m_pMappedData);
4026 void VmaAllocation_T::OwnAllocUnmapPersistentlyMappedMemory(VmaAllocator hAllocator)
4028 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
4029 if(m_OwnAllocation.m_pMappedData)
4031 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
4032 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_OwnAllocation.m_hMemory);
4033 m_OwnAllocation.m_pMappedData = VMA_NULL;
// CAS loop: returns true once the frame index is flipped to LOST. Fails fast
// if already lost or if the allocation was used within frameInUseCount
// frames of the current frame.
4038 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4040 VMA_ASSERT(CanBecomeLost());
4046 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4049 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4054 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4060 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Serializes one VmaStatInfo as a JSON object. The WriteNumber(...) calls
// paired with each WriteString name are among the lines missing from this
// extract; only the key names and nested-object structure survive.
4070 #if VMA_STATS_STRING_ENABLED 4073 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4082 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
// Scalar counters.
4086 json.WriteString(
"Blocks");
4089 json.WriteString(
"Allocations");
4092 json.WriteString(
"UnusedRanges");
4095 json.WriteString(
"UsedBytes");
4098 json.WriteString(
"UnusedBytes");
// Min/Avg/Max summary of allocation sizes (single-line nested object).
4103 json.WriteString(
"AllocationSize");
4104 json.BeginObject(
true);
4105 json.WriteString(
"Min");
4107 json.WriteString(
"Avg");
4109 json.WriteString(
"Max");
// Min/Avg/Max summary of unused-range sizes.
4116 json.WriteString(
"UnusedRangeSize");
4117 json.BeginObject(
true);
4118 json.WriteString(
"Min");
4120 json.WriteString(
"Avg");
4122 json.WriteString(
"Max");
// Comparator for the by-size free list: orders suballocation iterators by
// size, with a heterogeneous overload for binary search against a raw size.
4130 #endif // #if VMA_STATS_STRING_ENABLED 4132 struct VmaSuballocationItemSizeLess
4135 const VmaSuballocationList::iterator lhs,
4136 const VmaSuballocationList::iterator rhs)
const 4138 return lhs->size < rhs->size;
4141 const VmaSuballocationList::iterator lhs,
4142 VkDeviceSize rhsSize)
const 4144 return lhs->size < rhsSize;
// Ctor: everything inert until Init() is called; containers use the
// allocator's host allocation callbacks.
4148 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
4149 m_MemoryTypeIndex(UINT32_MAX),
4150 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
4151 m_hMemory(VK_NULL_HANDLE),
4153 m_PersistentMap(false),
4154 m_pMappedData(VMA_NULL),
4157 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4158 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Takes ownership of newMemory and resets bookkeeping to a single FREE
// suballocation spanning the whole block. Must not already hold memory.
4162 void VmaDeviceMemoryBlock::Init(
4163 uint32_t newMemoryTypeIndex,
4164 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
4165 VkDeviceMemory newMemory,
4166 VkDeviceSize newSize,
4170 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4172 m_MemoryTypeIndex = newMemoryTypeIndex;
4173 m_BlockVectorType = newBlockVectorType;
4174 m_hMemory = newMemory;
4176 m_PersistentMap = persistentMap;
4177 m_pMappedData = pMappedData;
// Whole block starts free.
4179 m_SumFreeSize = newSize;
4181 m_Suballocations.clear();
4182 m_FreeSuballocationsBySize.clear();
4184 VmaSuballocation suballoc = {};
4185 suballoc.offset = 0;
4186 suballoc.size = newSize;
4187 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4188 suballoc.hAllocation = VK_NULL_HANDLE;
4190 m_Suballocations.push_back(suballoc);
// Register the single free range in the by-size index. NOTE(review): the
// upstream decrement of the end() iterator to reach the pushed element
// (--suballocItem) is among the missing lines.
4191 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4193 m_FreeSuballocationsBySize.push_back(suballocItem);
// Unmaps if mapped, then returns the VkDeviceMemory to the allocator
// (which updates heap budget accounting). Block must be empty.
4196 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
4200 VMA_ASSERT(IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
4202 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
4203 if(m_pMappedData != VMA_NULL)
4205 (allocator->GetVulkanFunctions().vkUnmapMemory)(allocator->m_hDevice, m_hMemory);
4206 m_pMappedData = VMA_NULL;
4209 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Size, m_hMemory);
// Null handle signals "destroyed" to the destructor's assert.
4210 m_hMemory = VK_NULL_HANDLE;
// Heavy consistency check: walks the suballocation list verifying offsets
// are contiguous, no two adjacent FREE ranges exist, hAllocation is null
// exactly for FREE ranges, and the by-size index matches. (The early
// `return false` bodies are among the lines missing from this extract.)
4213 bool VmaDeviceMemoryBlock::Validate()
const 4215 if((m_hMemory == VK_NULL_HANDLE) ||
4217 m_Suballocations.empty())
// Recomputed from scratch and compared to cached counters at the end.
4223 VkDeviceSize calculatedOffset = 0;
4225 uint32_t calculatedFreeCount = 0;
4227 VkDeviceSize calculatedSumFreeSize = 0;
4230 size_t freeSuballocationsToRegister = 0;
// Two adjacent FREE ranges indicate a missed merge.
4232 bool prevFree =
false;
4234 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4235 suballocItem != m_Suballocations.cend();
4238 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous ended.
4241 if(subAlloc.offset != calculatedOffset)
4246 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4248 if(prevFree && currFree)
4252 prevFree = currFree;
// FREE <=> no allocation handle.
4254 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4261 calculatedSumFreeSize += subAlloc.size;
4262 ++calculatedFreeCount;
// Only free ranges at or above the registration threshold appear in the
// by-size index.
4263 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4265 ++freeSuballocationsToRegister;
4269 calculatedOffset += subAlloc.size;
4274 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must reference FREE ranges in non-decreasing size order.
4279 VkDeviceSize lastSize = 0;
4280 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4282 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4285 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4290 if(suballocItem->size < lastSize)
4295 lastSize = suballocItem->size;
// Final result: recomputed values must equal the cached members.
4300 (calculatedOffset == m_Size) &&
4301 (calculatedSumFreeSize == m_SumFreeSize) &&
4302 (calculatedFreeCount == m_FreeCount);
// Finds where allocSize/allocAlignment/allocType can be placed in this block.
// Strategy: best-fit over the by-size free list; if canMakeOtherLost, also
// brute-force every suballocation, costing lost allocations via CalcCost().
// (Several CheckAllocation argument lines and return statements are missing
// from this extract; the fused original line numbers show the gaps.)
4315 bool VmaDeviceMemoryBlock::CreateAllocationRequest(
4316 uint32_t currentFrameIndex,
4317 uint32_t frameInUseCount,
4318 VkDeviceSize bufferImageGranularity,
4319 VkDeviceSize allocSize,
4320 VkDeviceSize allocAlignment,
4321 VmaSuballocationType allocType,
4322 bool canMakeOtherLost,
4323 VmaAllocationRequest* pAllocationRequest)
4325 VMA_ASSERT(allocSize > 0);
4326 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4327 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4328 VMA_HEAVY_ASSERT(Validate());
// Early out: without making others lost, total free space must suffice.
4331 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4337 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4338 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted free list for the first
// range that can hold allocSize, then scan upward until one truly fits
// (alignment/granularity checked by CheckAllocation).
4343 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4344 m_FreeSuballocationsBySize.data(),
4345 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4347 VmaSuballocationItemSizeLess());
4348 size_t index = it - m_FreeSuballocationsBySize.data();
4349 for(; index < freeSuballocCount; ++index)
4354 bufferImageGranularity,
4358 m_FreeSuballocationsBySize[index],
4360 &pAllocationRequest->offset,
4361 &pAllocationRequest->itemsToMakeLostCount,
4362 &pAllocationRequest->sumFreeSize,
4363 &pAllocationRequest->sumItemSize))
4365 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit path (other allocation strategy): scan from the largest free
// range downward.
4373 for(
size_t index = freeSuballocCount; index--; )
4378 bufferImageGranularity,
4382 m_FreeSuballocationsBySize[index],
4384 &pAllocationRequest->offset,
4385 &pAllocationRequest->itemsToMakeLostCount,
4386 &pAllocationRequest->sumFreeSize,
4387 &pAllocationRequest->sumItemSize))
4389 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lost-allocation path: try every suballocation (free or losable) and keep
// the cheapest request by CalcCost().
4396 if(canMakeOtherLost)
4400 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4401 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4403 VmaAllocationRequest tmpAllocRequest = {};
4404 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4405 suballocIt != m_Suballocations.end();
4408 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4409 suballocIt->hAllocation->CanBecomeLost())
4414 bufferImageGranularity,
4420 &tmpAllocRequest.offset,
4421 &tmpAllocRequest.itemsToMakeLostCount,
4422 &tmpAllocRequest.sumFreeSize,
4423 &tmpAllocRequest.sumItemSize))
4425 tmpAllocRequest.item = suballocIt;
4427 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4429 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate was recorded (sumItemSize was sentinel-init'd).
4435 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Commits the "make lost" part of a request: walks forward from
// pAllocationRequest->item, skipping FREE ranges, making losable
// allocations lost and merging the freed space until the required count is
// reached. (The failure `return false` branch is among the missing lines.)
4444 bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
4446 while(pAllocationRequest->itemsToMakeLostCount > 0)
4448 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4450 ++pAllocationRequest->item;
4452 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4453 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4454 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4455 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges with neighbors and returns the merged iterator.
4457 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4458 --pAllocationRequest->itemsToMakeLostCount;
// Postcondition: the request now points at a FREE range ready for Alloc().
4466 VMA_HEAVY_ASSERT(Validate());
4467 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4468 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes every losable, sufficiently-aged allocation in this block lost and
// frees its range; returns how many were lost. Used by
// vmaMakePoolAllocationsLost.
4473 uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4475 uint32_t lostAllocationCount = 0;
4476 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4477 it != m_Suballocations.end();
4480 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4481 it->hAllocation->CanBecomeLost() &&
4482 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
4484 it = FreeSuballocation(it);
4485 ++lostAllocationCount;
4488 return lostAllocationCount;
// Checks whether an allocation of allocSize / allocAlignment / allocType can
// be placed starting at suballocItem. On success fills *pOffset with the
// chosen offset. With canMakeOtherLost == true it additionally counts how
// many live allocations would have to be made lost (*itemsToMakeLostCount)
// and accumulates free/occupied byte totals (*pSumFreeSize / *pSumItemSize)
// so callers can rank candidate placements by cost.
// Honors VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT and bufferImageGranularity.
// NOTE(review): extraction dropped braces and the explicit return
// statements; the two top-level branches below are canMakeOtherLost==true
// and the plain free-suballocation case.
4491 bool VmaDeviceMemoryBlock::CheckAllocation(
4492 uint32_t currentFrameIndex,
4493 uint32_t frameInUseCount,
4494 VkDeviceSize bufferImageGranularity,
4495 VkDeviceSize allocSize,
4496 VkDeviceSize allocAlignment,
4497 VmaSuballocationType allocType,
4498 VmaSuballocationList::const_iterator suballocItem,
4499 bool canMakeOtherLost,
4500 VkDeviceSize* pOffset,
4501 size_t* itemsToMakeLostCount,
4502 VkDeviceSize* pSumFreeSize,
4503 VkDeviceSize* pSumItemSize)
const 4505 VMA_ASSERT(allocSize > 0);
4506 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4507 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4508 VMA_ASSERT(pOffset != VMA_NULL);
4510 *itemsToMakeLostCount = 0;
// --- Branch 1: other allocations may be sacrificed (made lost). ---
4514 if(canMakeOtherLost)
4516 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4518 *pSumFreeSize = suballocItem->size;
// The starting item is occupied: it is usable only if it can become lost
// and is old enough relative to the frame-in-use window.
4522 if(suballocItem->hAllocation->CanBecomeLost() &&
4523 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4525 ++*itemsToMakeLostCount;
4526 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room between this offset and the end of block.
4535 if(m_Size - suballocItem->offset < allocSize)
4541 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation unless at the block start.
4544 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4546 *pOffset += VMA_DEBUG_MARGIN;
// Apply the stricter of the requested alignment and the debug alignment.
4550 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4551 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a previous suballocation of a conflicting type
// shares the same "page", bump the offset up to the granularity boundary.
4555 if(bufferImageGranularity > 1)
4557 bool bufferImageGranularityConflict =
false;
4558 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4559 while(prevSuballocItem != m_Suballocations.cbegin())
4562 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4563 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4565 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4567 bufferImageGranularityConflict =
true;
4575 if(bufferImageGranularityConflict)
4577 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
4583 if(*pOffset >= suballocItem->offset + suballocItem->size)
// Bytes lost to alignment/margin before the allocation proper.
4589 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4592 VmaSuballocationList::const_iterator next = suballocItem;
4594 const VkDeviceSize requiredEndMargin =
4595 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4597 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4599 if(suballocItem->offset + totalSize > m_Size)
// The starting item alone is too small: consume following suballocations,
// counting which must become lost and summing free/occupied bytes.
4606 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4607 if(totalSize > suballocItem->size)
4609 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4610 while(remainingSize > 0)
4613 if(lastSuballocItem == m_Suballocations.cend())
4617 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4619 *pSumFreeSize += lastSuballocItem->size;
4623 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4624 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4625 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4627 ++*itemsToMakeLostCount;
4628 *pSumItemSize += lastSuballocItem->size;
4635 remainingSize = (lastSuballocItem->size < remainingSize) ?
4636 remainingSize - lastSuballocItem->size : 0;
// Suballocations following the region that conflict on the same page must
// also become lost (their pages would overlap the new allocation).
4642 if(bufferImageGranularity > 1)
4644 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4646 while(nextSuballocItem != m_Suballocations.cend())
4648 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4649 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4651 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4653 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4654 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4655 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4657 ++*itemsToMakeLostCount;
// --- Branch 2: no sacrifices allowed; item itself must be free & fit. ---
4676 const VmaSuballocation& suballoc = *suballocItem;
4677 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4679 *pSumFreeSize = suballoc.size;
4682 if(suballoc.size < allocSize)
4688 *pOffset = suballoc.offset;
4691 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4693 *pOffset += VMA_DEBUG_MARGIN;
4697 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4698 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same granularity handling as branch 1: check preceding suballocations.
4702 if(bufferImageGranularity > 1)
4704 bool bufferImageGranularityConflict =
false;
4705 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4706 while(prevSuballocItem != m_Suballocations.cbegin())
4709 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4710 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4712 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4714 bufferImageGranularityConflict =
true;
4722 if(bufferImageGranularityConflict)
4724 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4729 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4732 VmaSuballocationList::const_iterator next = suballocItem;
4734 const VkDeviceSize requiredEndMargin =
4735 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free item.
4738 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A following suballocation of conflicting type on the same page makes this
// spot unusable (nothing can be made lost in this branch).
4745 if(bufferImageGranularity > 1)
4747 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4749 while(nextSuballocItem != m_Suballocations.cend())
4751 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4752 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4754 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4773 bool VmaDeviceMemoryBlock::IsEmpty()
const 4775 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Commits the allocation described by `request`: retypes the free
// suballocation at request.item, and inserts new free items for any padding
// created before (alignment) or after (leftover space) the allocation.
// Updates m_FreeCount and m_SumFreeSize accordingly.
4778 void VmaDeviceMemoryBlock::Alloc(
4779 const VmaAllocationRequest& request,
4780 VmaSuballocationType type,
4781 VkDeviceSize allocSize,
4782 VmaAllocation hAllocation)
4784 VMA_ASSERT(request.item != m_Suballocations.end());
4785 VmaSuballocation& suballoc = *request.item;
4787 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4789 VMA_ASSERT(request.offset >= suballoc.offset);
4790 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4791 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4792 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Item leaves the free-by-size registry before being repurposed.
4796 UnregisterFreeSuballocation(request.item);
4798 suballoc.offset = request.offset;
4799 suballoc.size = allocSize;
4800 suballoc.type = type;
4801 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes its own free suballocation.
4806 VmaSuballocation paddingSuballoc = {};
4807 paddingSuballoc.offset = request.offset + allocSize;
4808 paddingSuballoc.size = paddingEnd;
4809 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4810 VmaSuballocationList::iterator next = request.item;
4812 const VmaSuballocationList::iterator paddingEndItem =
4813 m_Suballocations.insert(next, paddingSuballoc);
4814 RegisterFreeSuballocation(paddingEndItem);
// Alignment gap before the allocation likewise becomes a free item.
4820 VmaSuballocation paddingSuballoc = {};
4821 paddingSuballoc.offset = request.offset - paddingBegin;
4822 paddingSuballoc.size = paddingBegin;
4823 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4824 const VmaSuballocationList::iterator paddingBeginItem =
4825 m_Suballocations.insert(request.item, paddingSuballoc);
4826 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free item consumed; NOTE(review) the increments for each
// inserted padding item appear to have been dropped by extraction.
4830 m_FreeCount = m_FreeCount - 1;
4831 if(paddingBegin > 0)
4839 m_SumFreeSize -= allocSize;
// Turns the given suballocation back into a free one and merges it with free
// neighbors - next first, then previous - keeping m_SumFreeSize and the
// free-by-size registry consistent. Returns an iterator to the resulting
// (possibly merged) free suballocation.
4842 VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
4845 VmaSuballocation& suballoc = *suballocItem;
4846 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4847 suballoc.hAllocation = VK_NULL_HANDLE;
4851 m_SumFreeSize += suballoc.size;
// Decide merging with neighbors before mutating the list.
4854 bool mergeWithNext =
false;
4855 bool mergeWithPrev =
false;
4857 VmaSuballocationList::iterator nextItem = suballocItem;
// NOTE(review): the advance of nextItem past suballocItem was dropped by
// extraction; the check below refers to the *following* element.
4859 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
4861 mergeWithNext =
true;
4864 VmaSuballocationList::iterator prevItem = suballocItem;
4865 if(suballocItem != m_Suballocations.begin())
4868 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4870 mergeWithPrev =
true;
// Merged neighbors must leave the registry before their sizes change.
4876 UnregisterFreeSuballocation(nextItem);
4877 MergeFreeWithNext(suballocItem);
// When merging into the previous item, it absorbs this one and is
// re-registered with its new, larger size.
4882 UnregisterFreeSuballocation(prevItem);
4883 MergeFreeWithNext(prevItem);
4884 RegisterFreeSuballocation(prevItem);
4889 RegisterFreeSuballocation(suballocItem);
4890 return suballocItem;
// Frees the suballocation owning `allocation`. Linear scan over the
// suballocation list; asserts if the allocation does not belong to this
// block.
4894 void VmaDeviceMemoryBlock::Free(
const VmaAllocation allocation)
4896 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4897 suballocItem != m_Suballocations.end();
4900 VmaSuballocation& suballoc = *suballocItem;
4901 if(suballoc.hAllocation == allocation)
4903 FreeSuballocation(suballocItem);
4904 VMA_HEAVY_ASSERT(Validate());
// Reaching the end of the loop means the caller passed an allocation that
// does not live in this block - a programming error.
4908 VMA_ASSERT(0 &&
"Not found!");
// Serializes this block's state as JSON: byte totals, allocation/free-range
// counts, and one object per suballocation. Compiled only when
// VMA_STATS_STRING_ENABLED.
4911 #if VMA_STATS_STRING_ENABLED 4913 void VmaDeviceMemoryBlock::PrintDetailedMap(
class VmaJsonWriter& json)
const 4917 json.WriteString(
"TotalBytes");
4918 json.WriteNumber(m_Size);
4920 json.WriteString(
"UnusedBytes");
4921 json.WriteNumber(m_SumFreeSize);
// Live allocations = total suballocations minus free ranges.
4923 json.WriteString(
"Allocations");
4924 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4926 json.WriteString(
"UnusedRanges");
4927 json.WriteNumber(m_FreeCount);
4929 json.WriteString(
"Suballocations");
4932 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4933 suballocItem != m_Suballocations.cend();
4934 ++suballocItem, ++i)
4936 json.BeginObject(
true);
4938 json.WriteString(
"Type");
4939 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4941 json.WriteString(
"Size");
4942 json.WriteNumber(suballocItem->size);
4944 json.WriteString(
"Offset");
4945 json.WriteNumber(suballocItem->offset);
// Merges a free suballocation with the free suballocation immediately after
// it: sizes are summed into `item` and the next element is erased.
// NOTE(review): the advance of nextItem to the following element was dropped
// by extraction; the asserts below refer to the *next* item.
4954 #endif // #if VMA_STATS_STRING_ENABLED 4956 void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
4958 VMA_ASSERT(item != m_Suballocations.end());
4959 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4961 VmaSuballocationList::iterator nextItem = item;
4963 VMA_ASSERT(nextItem != m_Suballocations.end());
4964 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
4966 item->size += nextItem->size;
// m_FreeCount decrement happens here in bookkeeping dropped by extraction.
4968 m_Suballocations.erase(nextItem);
// Adds a free suballocation to m_FreeSuballocationsBySize, which is kept
// sorted by size. Items smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are intentionally not tracked.
4971 void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
4973 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4974 VMA_ASSERT(item->size > 0);
4978 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4980 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// First entry needs no sorted insert.
4982 if(m_FreeSuballocationsBySize.empty())
4984 m_FreeSuballocationsBySize.push_back(item);
4988 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the size-sorted registry. Binary-search
// to the first entry of equal size, then linear-scan among equal-size
// entries to find the exact iterator. Asserts if the item is not present.
4996 void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
4998 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4999 VMA_ASSERT(item->size > 0);
5003 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5005 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5007 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5008 m_FreeSuballocationsBySize.data(),
5009 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5011 VmaSuballocationItemSizeLess());
5012 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5013 index < m_FreeSuballocationsBySize.size();
5016 if(m_FreeSuballocationsBySize[index] == item)
5018 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Once sizes stop matching, the item cannot be further in the vector.
5021 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5023 VMA_ASSERT(0 &&
"Not found.");
// Debug validation of m_FreeSuballocationsBySize: every entry must be free,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// must be sorted by non-decreasing size. (Return statements for the failure
// cases were dropped by extraction.)
5029 bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList()
const 5031 VkDeviceSize lastSize = 0;
5032 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5034 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5036 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5041 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5046 if(it->size < lastSize)
5052 lastSize = it->size;
// NOTE(review): the enclosing function's signature was dropped by
// extraction; this body zero-initializes a VmaStatInfo out-parameter
// (presumably a VmaInitStatInfo(VmaStatInfo&) helper - confirm upstream).
5059 memset(&outInfo, 0,
sizeof(outInfo));
// Fills a VmaStatInfo from one device-memory block by scanning its
// suballocation list and separating used ranges from free ranges.
// NOTE(review): most accumulation statements were dropped by extraction;
// only the iteration skeleton is visible here.
5064 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaDeviceMemoryBlock& block)
5068 const uint32_t rangeCount = (uint32_t)block.m_Suballocations.size();
5080 for(VmaSuballocationList::const_iterator suballocItem = block.m_Suballocations.cbegin();
5081 suballocItem != block.m_Suballocations.cend();
5084 const VmaSuballocation& suballoc = *suballocItem;
5085 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Finalizes a VmaStatInfo after accumulation (e.g. derived averages).
// NOTE(review): the body was dropped by extraction; only the signature
// remains visible.
5112 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool's VmaPoolCreateInfo fields to the
// embedded VmaBlockVector (member-initializer list partially dropped by
// extraction). The MAPPED/UNMAPPED choice follows the pool's persistent-map
// create flag.
5120 VmaPool_T::VmaPool_T(
5121 VmaAllocator hAllocator,
5125 createInfo.memoryTypeIndex,
5127 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5128 createInfo.blockSize,
5129 createInfo.minBlockCount,
5130 createInfo.maxBlockCount,
5132 createInfo.frameInUseCount,
// Destructor body dropped by extraction.
5137 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration for a sequence of
// VkDeviceMemory blocks of one memory type (preferred block size, min/max
// block count, granularity, frame-in-use window) and whether this vector
// backs a custom pool. The blocks vector uses the allocator's callbacks.
5141 #if VMA_STATS_STRING_ENABLED 5143 #endif // #if VMA_STATS_STRING_ENABLED 5145 VmaBlockVector::VmaBlockVector(
5146 VmaAllocator hAllocator,
5147 uint32_t memoryTypeIndex,
5148 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5149 VkDeviceSize preferredBlockSize,
5150 size_t minBlockCount,
5151 size_t maxBlockCount,
5152 VkDeviceSize bufferImageGranularity,
5153 uint32_t frameInUseCount,
5154 bool isCustomPool) :
5155 m_hAllocator(hAllocator),
5156 m_MemoryTypeIndex(memoryTypeIndex),
5157 m_BlockVectorType(blockVectorType),
5158 m_PreferredBlockSize(preferredBlockSize),
5159 m_MinBlockCount(minBlockCount),
5160 m_MaxBlockCount(maxBlockCount),
5161 m_BufferImageGranularity(bufferImageGranularity),
5162 m_FrameInUseCount(frameInUseCount),
5163 m_IsCustomPool(isCustomPool),
5164 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5165 m_HasEmptyBlock(false),
// Defragmentator is created lazily by EnsureDefragmentator().
5166 m_pDefragmentator(VMA_NULL)
// Destructor: the defragmentator must already have been destroyed
// (DestroyDefragmentator); frees all remaining device-memory blocks in
// reverse order.
5170 VmaBlockVector::~VmaBlockVector()
5172 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5174 for(
size_t i = m_Blocks.size(); i--; )
5176 m_Blocks[i]->Destroy(m_hAllocator);
5177 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size. Stops (and
// propagates the error result - return dropped by extraction) on the first
// failure.
5181 VkResult VmaBlockVector::CreateMinBlocks()
5183 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5185 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5186 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks under the vector's mutex
// (mutex is honored only when the allocator was created with m_UseMutex).
// NOTE(review): several accumulation statements were dropped by extraction.
5194 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5201 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5203 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5205 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5207 VMA_HEAVY_ASSERT(pBlock->Validate());
5209 const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
5211 pStats->
size += pBlock->m_Size;
// Upper bound on make-lost retry attempts in VmaBlockVector::Allocate below.
5218 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector, in order of preference:
//   1. fit into an existing block (no allocations sacrificed),
//   2. create a new block (with size fallback when device memory is tight),
//   3. if allowed, make other allocations lost, choosing the cheapest
//      candidate placement, retrying up to VMA_ALLOCATION_TRY_COUNT times.
// Fails with VK_ERROR_OUT_OF_DEVICE_MEMORY / VK_ERROR_TOO_MANY_OBJECTS.
// NOTE(review): braces and several argument lines were dropped by
// extraction; structure below is indicative.
5220 VkResult VmaBlockVector::Allocate(
5221 VmaPool hCurrentPool,
5222 uint32_t currentFrameIndex,
5223 const VkMemoryRequirements& vkMemReq,
5225 VmaSuballocationType suballocType,
5226 VmaAllocation* pAllocation)
// Persistent-map request must match the vector's mapped/unmapped type.
5230 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5232 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5233 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5236 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- Pass 1: try to place in an existing block without sacrifices. ---
5240 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5242 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5243 VMA_ASSERT(pCurrBlock);
5244 VmaAllocationRequest currRequest = {};
5245 if(pCurrBlock->CreateAllocationRequest(
5248 m_BufferImageGranularity,
5256 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Block is about to receive an allocation, so it is no longer empty.
5259 if(pCurrBlock->IsEmpty())
5261 m_HasEmptyBlock =
false;
5264 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5265 pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5266 (*pAllocation)->InitBlockAllocation(
5275 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5276 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// --- Pass 2: create a new block, if the block-count limit permits. ---
5281 const bool canCreateNewBlock =
5283 (m_Blocks.size() < m_MaxBlockCount);
5286 if(canCreateNewBlock)
5289 VkDeviceSize blockSize = m_PreferredBlockSize;
5290 size_t newBlockIndex = 0;
5291 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default pools retry with smaller blocks on failure; NOTE(review) the
// halving of blockSize between retries was dropped by extraction.
5294 if(res < 0 && m_IsCustomPool ==
false)
5298 if(blockSize >= vkMemReq.size)
5300 res = CreateBlock(blockSize, &newBlockIndex);
5305 if(blockSize >= vkMemReq.size)
5307 res = CreateBlock(blockSize, &newBlockIndex);
// A fresh block contains one free suballocation spanning it; allocate at 0.
5312 if(res == VK_SUCCESS)
5314 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5315 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
5318 VmaAllocationRequest allocRequest = {};
5319 allocRequest.item = pBlock->m_Suballocations.begin();
5320 allocRequest.offset = 0;
5321 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5322 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5323 (*pAllocation)->InitBlockAllocation(
5326 allocRequest.offset,
5332 VMA_HEAVY_ASSERT(pBlock->Validate());
5333 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// --- Pass 3: sacrifice other allocations (lost-allocation mechanism). ---
5342 if(canMakeOtherLost)
5344 uint32_t tryIndex = 0;
5345 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5347 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5348 VmaAllocationRequest bestRequest = {};
5349 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Evaluate every block; keep the cheapest candidate (fewest bytes lost).
5353 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5355 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5356 VMA_ASSERT(pCurrBlock);
5357 VmaAllocationRequest currRequest = {};
5358 if(pCurrBlock->CreateAllocationRequest(
5361 m_BufferImageGranularity,
5368 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5369 if(pBestRequestBlock == VMA_NULL ||
5370 currRequestCost < bestRequestCost)
5372 pBestRequestBlock = pCurrBlock;
5373 bestRequest = currRequest;
5374 bestRequestCost = currRequestCost;
// Zero cost means no live allocation has to be sacrificed - stop searching.
5376 if(bestRequestCost == 0)
5384 if(pBestRequestBlock != VMA_NULL)
// Making-lost can fail if a victim became in-use again; then retry the
// whole pass (up to VMA_ALLOCATION_TRY_COUNT).
5386 if(pBestRequestBlock->MakeRequestedAllocationsLost(
5392 if(pBestRequestBlock->IsEmpty())
5394 m_HasEmptyBlock =
false;
5397 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5398 pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5399 (*pAllocation)->InitBlockAllocation(
5408 VMA_HEAVY_ASSERT(pBlock->Validate());
5409 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5423 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5425 return VK_ERROR_TOO_MANY_OBJECTS;
5429 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block. If the block becomes empty and an
// empty block is already being kept (or the min-block-count allows), the
// block is destroyed - deliberately *outside* the mutex scope, hence the
// pBlockToDelete local.
5432 void VmaBlockVector::Free(
5433 VmaAllocation hAllocation)
5435 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of lock: NOTE(review) the enclosing braces were dropped by
// extraction; deletion below happens after the lock is released.
5439 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5441 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5443 pBlock->Free(hAllocation);
5444 VMA_HEAVY_ASSERT(pBlock->Validate());
5446 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Keep at most one empty block around as a cache; schedule extras for
// destruction.
5449 if(pBlock->IsEmpty())
5452 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5454 pBlockToDelete = pBlock;
5460 m_HasEmptyBlock =
true;
5464 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is performed outside the lock.
5469 if(pBlockToDelete != VMA_NULL)
5471 VMA_DEBUG_LOG(
" Deleted empty allocation");
5472 pBlockToDelete->Destroy(m_hAllocator);
5473 vma_delete(m_hAllocator, pBlockToDelete);
// Removes (but does not destroy) the given block from m_Blocks.
// NOTE(review): the early return after removal and the trailing
// "not found" assert were dropped by extraction.
5477 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5479 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5481 if(m_Blocks[blockIndex] == pBlock)
5483 VmaVectorRemove(m_Blocks, blockIndex);
5490 void VmaBlockVector::IncrementallySortBlocks()
5493 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5495 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
5497 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize, maps it when this vector is
// persistently mapped (and unmap is not globally suspended), wraps it in a
// VmaDeviceMemoryBlock, and appends it to m_Blocks. Optionally reports the
// new block's index through pNewBlockIndex.
5503 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5505 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5506 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5507 allocInfo.allocationSize = blockSize;
5508 VkDeviceMemory mem = VK_NULL_HANDLE;
5509 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Persistently mapped vectors map the memory immediately, unless the user
// has called vmaUnmapPersistentlyMappedMemory (counter != 0).
5518 void* pMappedData = VMA_NULL;
5519 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5520 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5522 res = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5523 m_hAllocator->m_hDevice,
// On map failure the freshly allocated memory is released again.
5531 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5532 m_hAllocator->FreeVulkanMemory(m_MemoryTypeIndex, blockSize, mem);
5538 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5541 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5543 allocInfo.allocationSize,
5547 m_Blocks.push_back(pBlock);
5548 if(pNewBlockIndex != VMA_NULL)
5550 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON: configuration (memory type, block
// size limits, frame-in-use count) plus a detailed map of every block.
// Custom pools and default vectors emit slightly different fields.
// Compiled only when VMA_STATS_STRING_ENABLED.
5556 #if VMA_STATS_STRING_ENABLED 5558 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5560 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex)
5566 json.WriteString(
"MemoryTypeIndex");
5567 json.WriteNumber(m_MemoryTypeIndex);
5569 if(m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
5571 json.WriteString(
"Mapped");
5572 json.WriteBool(
true);
5575 json.WriteString(
"BlockSize");
5576 json.WriteNumber(m_PreferredBlockSize);
5578 json.WriteString(
"BlockCount");
5579 json.BeginObject(
true);
// Min/Max are emitted only when they constrain the pool.
5580 if(m_MinBlockCount > 0)
5582 json.WriteString(
"Min");
5583 json.WriteNumber(m_MinBlockCount);
5585 if(m_MaxBlockCount < SIZE_MAX)
5587 json.WriteString(
"Max");
5588 json.WriteNumber(m_MaxBlockCount);
5590 json.WriteString(
"Cur");
5591 json.WriteNumber(m_Blocks.size());
5594 if(m_FrameInUseCount > 0)
5596 json.WriteString(
"FrameInUseCount");
5597 json.WriteNumber(m_FrameInUseCount);
5602 json.WriteString(
"PreferredBlockSize");
5603 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, in current block order.
5606 json.WriteString(
"Blocks");
5608 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5610 m_Blocks[i]->PrintDetailedMap(json);
// Unmaps every currently mapped block in this vector (support for the
// vmaUnmapPersistentlyMappedMemory API). Only blocks flagged persistent-map
// should have a non-null m_pMappedData here.
5617 #endif // #if VMA_STATS_STRING_ENABLED 5619 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5621 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5623 for(
size_t i = m_Blocks.size(); i--; )
5625 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5626 if(pBlock->m_pMappedData != VMA_NULL)
5628 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5629 (m_hAllocator->GetVulkanFunctions().vkUnmapMemory)(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5630 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every persistently mapped block (counterpart of
// UnmapPersistentlyMappedMemory). Continues past failures and returns the
// last failing VkResult, so as many blocks as possible end up mapped.
5635 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5637 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5639 VkResult finalResult = VK_SUCCESS;
5640 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5642 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5643 if(pBlock->m_PersistentMap)
5645 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5646 VkResult localResult = (*m_hAllocator->GetVulkanFunctions().vkMapMemory)(
5647 m_hAllocator->m_hDevice,
5652 &pBlock->m_pMappedData);
5653 if(localResult != VK_SUCCESS)
5655 finalResult = localResult;
// Lazily creates (once) and returns the defragmentator for this block
// vector. Destroyed later via DestroyDefragmentator().
5662 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5663 VmaAllocator hAllocator,
5664 uint32_t currentFrameIndex)
5666 if(m_pDefragmentator == VMA_NULL)
5668 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5674 return m_pDefragmentator;
// Runs the defragmentator under the vector's mutex, accumulates moved
// bytes/allocations into pDefragmentationStats, then destroys blocks that
// became empty (beyond m_MinBlockCount), crediting freed bytes to the stats.
5677 VkResult VmaBlockVector::Defragment(
5679 VkDeviceSize& maxBytesToMove,
5680 uint32_t& maxAllocationsToMove)
5682 if(m_pDefragmentator == VMA_NULL)
5687 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5690 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report how much work the defragmentator actually did.
5693 if(pDefragmentationStats != VMA_NULL)
5695 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5696 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
5699 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5700 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks; iterate backwards because VmaVectorRemove shifts
// later elements. m_HasEmptyBlock is recomputed along the way.
5706 m_HasEmptyBlock =
false;
5707 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5709 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5710 if(pBlock->IsEmpty())
5712 if(m_Blocks.size() > m_MinBlockCount)
5714 if(pDefragmentationStats != VMA_NULL)
5717 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5720 VmaVectorRemove(m_Blocks, blockIndex);
5721 pBlock->Destroy(m_hAllocator);
5722 vma_delete(m_hAllocator, pBlock);
5726 m_HasEmptyBlock =
true;
5734 void VmaBlockVector::DestroyDefragmentator()
5736 if(m_pDefragmentator != VMA_NULL)
5738 vma_delete(m_hAllocator, m_pDefragmentator);
5739 m_pDefragmentator = VMA_NULL;
// Makes lost all eligible allocations across every block of this vector
// (support for vmaMakePoolAllocationsLost).
// NOTE(review): the accumulation of per-block lost counts into
// pLostAllocationCount appears to have been dropped by extraction - the
// return value of MakeAllocationsLost is discarded in the visible text.
5743 void VmaBlockVector::MakePoolAllocationsLost(
5744 uint32_t currentFrameIndex,
5745 size_t* pLostAllocationCount)
5747 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5749 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5751 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5753 pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Accumulates per-block statistics into the global VmaStats: the same
// VmaStatInfo is folded into the grand total, the per-memory-type entry,
// and the per-heap entry.
5757 void VmaBlockVector::AddStats(
VmaStats* pStats)
5759 const uint32_t memTypeIndex = m_MemoryTypeIndex;
5760 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
5762 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5764 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5766 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5768 VMA_HEAVY_ASSERT(pBlock->Validate());
5770 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
5771 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5772 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5773 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Defragmentator constructor: binds to one block vector and records the
// current frame index; allocation/block bookkeeping vectors use the
// allocator's callbacks.
5780 VmaDefragmentator::VmaDefragmentator(
5781 VmaAllocator hAllocator,
5782 VmaBlockVector* pBlockVector,
5783 uint32_t currentFrameIndex) :
5784 m_hAllocator(hAllocator),
5785 m_pBlockVector(pBlockVector),
5786 m_CurrentFrameIndex(currentFrameIndex),
5788 m_AllocationsMoved(0),
5789 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
5790 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees the per-block BlockInfo objects created by Defragment().
5794 VmaDefragmentator::~VmaDefragmentator()
5796 for(
size_t i = m_Blocks.size(); i--; )
5798 vma_delete(m_hAllocator, m_Blocks[i]);
5802 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5804 AllocationInfo allocInfo;
5805 allocInfo.m_hAllocation = hAlloc;
5806 allocInfo.m_pChanged = pChanged;
5807 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, in order of preference:
// an existing defragmentation mapping, the block's persistent mapping, or a
// fresh vkMapMemory (remembered in m_pMappedDataForDefragmentation so that
// Unmap() can release it later).
5810 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
5813 if(m_pMappedDataForDefragmentation)
5815 *ppMappedData = m_pMappedDataForDefragmentation;
// Persistently mapped blocks already expose a pointer - reuse it.
5820 if(m_pBlock->m_PersistentMap)
5822 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
5823 *ppMappedData = m_pBlock->m_pMappedData;
5828 VkResult res = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5829 hAllocator->m_hDevice,
5830 m_pBlock->m_hMemory,
5834 &m_pMappedDataForDefragmentation);
5835 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases a mapping created by EnsureMapping(). Persistent mappings (owned
// by the block itself) are never unmapped here.
// NOTE(review): the reset of m_pMappedDataForDefragmentation to VMA_NULL
// after unmapping appears to have been dropped by extraction.
5839 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
5841 if(m_pMappedDataForDefragmentation != VMA_NULL)
5843 (hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_pBlock->m_hMemory);
// One round of defragmentation: repeatedly takes the last allocation of the
// last non-empty block (the "source") and tries to move it into an earlier
// block (the "destination") where MoveMakesSense. Stops with VK_INCOMPLETE
// when maxBytesToMove / maxAllocationsToMove would be exceeded.
// NOTE(review): braces and several cursor-advance statements were dropped by
// extraction; structure below is indicative.
5847 VkResult VmaDefragmentator::DefragmentRound(
5848 VkDeviceSize maxBytesToMove,
5849 uint32_t maxAllocationsToMove)
5851 if(m_Blocks.empty())
// Cursor starts past the end of the last block's allocation list and is
// normalized by the loop below.
5856 size_t srcBlockIndex = m_Blocks.size() - 1;
5857 size_t srcAllocIndex = SIZE_MAX;
5863 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5865 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// No earlier block left to take allocations from - round is finished.
5868 if(srcBlockIndex == 0)
5875 srcAllocIndex = SIZE_MAX;
5880 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5884 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5885 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5887 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5888 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5889 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5890 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
5893 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5895 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5896 VmaAllocationRequest dstAllocRequest;
5897 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
5898 m_CurrentFrameIndex,
5899 m_pBlockVector->GetFrameInUseCount(),
5900 m_pBlockVector->GetBufferImageGranularity(),
5905 &dstAllocRequest) &&
5907 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5909 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller's move budget before doing any work.
5912 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5913 (m_BytesMoved + size > maxBytesToMove))
5915 return VK_INCOMPLETE;
5918 void* pDstMappedData = VMA_NULL;
5919 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
5920 if(res != VK_SUCCESS)
5925 void* pSrcMappedData = VMA_NULL;
5926 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
5927 if(res != VK_SUCCESS)
// Copy the allocation's bytes, commit it in the destination block, free it
// from the source, and retarget the allocation handle.
5934 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
5935 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
5936 static_cast<size_t>(size));
5938 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
5939 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
5941 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
5943 if(allocInfo.m_pChanged != VMA_NULL)
5945 *allocInfo.m_pChanged = VK_TRUE;
5948 ++m_AllocationsMoved;
5949 m_BytesMoved += size;
5951 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor to the previous allocation / previous block.
5959 if(srcAllocIndex > 0)
5965 if(srcBlockIndex > 0)
5968 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation: builds per-block BlockInfo records, distributes
// the queued allocations to their owning blocks (skipping lost ones), sorts
// blocks into destination-preference order, then runs up to two
// DefragmentRound passes. Finally unmaps any temporary mappings.
5978 VkResult VmaDefragmentator::Defragment(
5979 VkDeviceSize maxBytesToMove,
5980 uint32_t maxAllocationsToMove)
5982 if(m_Allocations.empty())
// One BlockInfo per block of the vector under defragmentation.
5988 const size_t blockCount = m_pBlockVector->m_Blocks.size();
5989 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5991 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
5992 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
5993 m_Blocks.push_back(pBlockInfo);
// Sorted by block pointer so each allocation's block can be binary-searched.
5997 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6000 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6002 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are not moved - they are simply dropped from the list.
6004 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6006 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6007 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6008 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6010 (*it)->m_Allocations.push_back(allocInfo);
6018 m_Allocations.clear();
6020 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6022 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6023 pBlockInfo->CalcHasNonMovableAllocations();
6024 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred destinations first (see BlockInfoCompareMoveDestination).
6028 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6031 VkResult result = VK_SUCCESS;
6032 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6034 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release temporary vkMapMemory mappings made during the rounds.
6038 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6040 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic deciding whether moving an allocation from (srcBlockIndex,
// srcOffset) to (dstBlockIndex, dstOffset) improves compaction: an earlier
// block, or an earlier offset within the same block, is an improvement.
// NOTE(review): the return statements were dropped by extraction; the
// comparisons below are the visible decision points.
6046 bool VmaDefragmentator::MoveMakesSense(
6047 size_t dstBlockIndex, VkDeviceSize dstOffset,
6048 size_t srcBlockIndex, VkDeviceSize srcOffset)
6050 if(dstBlockIndex < srcBlockIndex)
6054 if(dstBlockIndex > srcBlockIndex)
6058 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (NOTE(review): the signature line and the
// start of the member-initializer list were dropped by extraction).
// Copies configuration from the create-info, imports Vulkan function
// pointers, queries device/memory properties, applies per-heap size limits,
// and creates one block vector + own-allocation list per
// (memory type, mapped/unmapped) pair.
6070 m_PhysicalDevice(pCreateInfo->physicalDevice),
6071 m_hDevice(pCreateInfo->device),
6072 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6073 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6074 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6075 m_UnmapPersistentlyMappedMemoryCounter(0),
6076 m_PreferredLargeHeapBlockSize(0),
6077 m_PreferredSmallHeapBlockSize(0),
6078 m_CurrentFrameIndex(0),
6079 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6083 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6084 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6085 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6087 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6088 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
// Heaps default to "no limit"; user limits are applied below.
6090 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6092 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6103 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6104 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Clamp reported heap sizes to the user-provided limits so the allocator's
// budgeting respects them.
6113 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6115 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6116 if(limit != VK_WHOLE_SIZE)
6118 m_HeapSizeLimit[heapIndex] = limit;
6119 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6121 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and own-allocation list per memory type, for
// each of the mapped/unmapped vector types.
6127 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6129 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6131 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
6133 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
6136 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
6140 GetBufferImageGranularity(),
6145 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Allocator destructor: all custom pools must already be destroyed; frees
// every default block vector and own-allocation list, in reverse order of
// creation.
6150 VmaAllocator_T::~VmaAllocator_T()
6152 VMA_ASSERT(m_Pools.empty());
6154 for(
size_t i = GetMemoryTypeCount(); i--; )
6156 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
6158 vma_delete(
this, m_pOwnAllocations[i][j]);
6159 vma_delete(
this, m_pBlockVectors[i][j]);
// Fills m_VulkanFunctions: when VMA_STATIC_VULKAN_FUNCTIONS is enabled the
// statically-linked Vulkan entry points are used as defaults; any non-null
// pointers supplied by the user in pVulkanFunctions then override them.
// Finally, every function pointer the allocator needs is asserted non-null.
6164 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6166 #if VMA_STATIC_VULKAN_FUNCTIONS 6167 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6168 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6169 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6170 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6171 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6172 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6173 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6174 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6175 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6176 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6177 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6178 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6179 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6180 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// User-provided overrides. NOTE(review): the extracted listing shows a whole-
// struct assignment here; presumably the original copies only non-null members
// or relies on the asserts below — confirm against the upstream source.
6181 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS 6183 if(pVulkanFunctions != VMA_NULL)
6185 m_VulkanFunctions = *pVulkanFunctions;
6190 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6191 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6192 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6193 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6194 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6195 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6196 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6197 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6198 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6199 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6200 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6201 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6202 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6203 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// Chooses the default VkDeviceMemory block size for a memory type: small heaps
// (<= VMA_SMALL_HEAP_MAX_SIZE) get the small-heap block size, large heaps the
// large-heap block size. Heap size may already have been clamped by the
// user-specified pHeapSizeLimit in the constructor.
6206 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6208 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6209 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6210 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6211 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type. Chooses between suballocation
// from the shared block vector and a dedicated ("own") VkDeviceMemory: own
// memory is forced by VMA_DEBUG_ALWAYS_OWN_MEMORY or for requests larger than
// half the preferred block size (other conditions are missing from this
// extracted view). On block-vector failure it falls back to own memory.
// NOTE(review): several source lines (flag checks, argument lists, braces)
// were dropped by extraction — do not treat this listing as complete.
6214 VkResult VmaAllocator_T::AllocateMemoryOfType(
6215 const VkMemoryRequirements& vkMemReq,
6217 uint32_t memTypeIndex,
6218 VmaSuballocationType suballocType,
6219 VmaAllocation* pAllocation)
6221 VMA_ASSERT(pAllocation != VMA_NULL);
6222 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped vs. unmapped block vector is selected from the allocation flags.
6224 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
6225 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6226 VMA_ASSERT(blockVector);
6228 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6230 const bool ownMemory =
6232 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
6234 vkMemReq.size > preferredBlockSize / 2);
6240 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6244 return AllocateOwnMemory(
// Primary path: suballocate from an existing or newly created block.
6255 VkResult res = blockVector->Allocate(
6257 m_CurrentFrameIndex.load(),
6262 if(res == VK_SUCCESS)
// Fallback: dedicated allocation when suballocation failed.
6268 res = AllocateOwnMemory(
6273 createInfo.pUserData,
6275 if(res == VK_SUCCESS)
6278 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6284 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated VkDeviceMemory for a single allocation. If persistent
// mapping is requested (map) and persistently mapped memory is not currently
// globally unmapped, the memory is mapped immediately; a map failure frees the
// Vulkan memory. The resulting VmaAllocation is registered in the sorted
// per-type own-allocations vector under its mutex.
6290 VkResult VmaAllocator_T::AllocateOwnMemory(
6292 VmaSuballocationType suballocType,
6293 uint32_t memTypeIndex,
6296 VmaAllocation* pAllocation)
6298 VMA_ASSERT(pAllocation);
6300 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6301 allocInfo.memoryTypeIndex = memTypeIndex;
6302 allocInfo.allocationSize = size;
// AllocateVulkanMemory also enforces the optional per-heap size limit.
6305 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6306 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
6309 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6313 void* pMappedData =
nullptr;
// Counter == 0 means persistently mapped memory is currently mapped.
6316 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6318 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
6321 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6322 FreeVulkanMemory(memTypeIndex, size, hMemory);
6328 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6329 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
// Register in the sorted own-allocations vector for this memory type.
6333 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6334 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6335 VMA_ASSERT(pOwnAllocations);
6336 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6339 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
// Top-level allocation entry point. Validates mutually exclusive create flags,
// routes pool allocations directly to the pool's block vector, and otherwise
// iterates candidate memory types (via vmaFindMemoryTypeIndex-style lookup,
// lines missing here): on failure the tried type's bit is cleared from
// memoryTypeBits and the next best type is attempted.
6344 VkResult VmaAllocator_T::AllocateMemory(
6345 const VkMemoryRequirements& vkMemReq,
6347 VmaSuballocationType suballocType,
6348 VmaAllocation* pAllocation)
// OWN_MEMORY + NEVER_ALLOCATE is contradictory: reject.
6353 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6354 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6356 if((createInfo.
pool != VK_NULL_HANDLE) &&
6359 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6360 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate to the pool's own block vector.
6363 if(createInfo.
pool != VK_NULL_HANDLE)
6365 return createInfo.
pool->m_BlockVector.Allocate(
6367 m_CurrentFrameIndex.load(),
// Default-pool path: try memory types from best to worst.
6376 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6377 uint32_t memTypeIndex = UINT32_MAX;
6379 if(res == VK_SUCCESS)
6381 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6383 if(res == VK_SUCCESS)
// Exclude the failed type and retry with the remaining candidates.
6393 memoryTypeBits &= ~(1u << memTypeIndex);
6396 if(res == VK_SUCCESS)
6398 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6400 if(res == VK_SUCCESS)
6410 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation. If it is not lost, the backing storage is returned to
// the owning block vector (custom pool or default per-type vector) or, for
// dedicated allocations, freed via FreeOwnMemory. The VmaAllocation_T object
// itself is always deleted at the end.
6421 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6423 VMA_ASSERT(allocation);
// A lost allocation has no backing memory left to release.
6425 if(allocation->CanBecomeLost() ==
false ||
6426 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6428 switch(allocation->GetType())
6430 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6432 VmaBlockVector* pBlockVector = VMA_NULL;
6433 VmaPool hPool = allocation->GetPool();
6434 if(hPool != VK_NULL_HANDLE)
6436 pBlockVector = &hPool->m_BlockVector;
6440 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6441 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6442 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6444 pBlockVector->Free(allocation);
6447 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6448 FreeOwnMemory(allocation);
6455 vma_delete(
this, allocation);
// Aggregates statistics over the whole allocator: default block vectors,
// custom pools, and dedicated ("own") allocations, then postprocesses
// (e.g. averages) the totals, per-type, and per-heap entries.
6458 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero-initialize all stat buckets first.
6461 InitStatInfo(pStats->
total);
6462 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6464 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
6468 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6470 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6471 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6473 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6474 VMA_ASSERT(pBlockVector);
6475 pBlockVector->AddStats(pStats);
// Custom pools (guarded by the pools mutex).
6481 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6482 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6484 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations (guarded per memory type).
6489 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6491 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6492 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6493 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6495 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6496 VMA_ASSERT(pOwnAllocVector);
6497 for(
size_t allocIndex = 0, allocCount = pOwnAllocVector->size(); allocIndex < allocCount; ++allocIndex)
6500 (*pOwnAllocVector)[allocIndex]->OwnAllocCalcStatsInfo(allocationStatInfo);
6501 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6502 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6503 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Final postprocessing pass over every bucket.
6509 VmaPostprocessCalcStatInfo(pStats->
total);
6510 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6511 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6512 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6513 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID used to detect AMD GPUs (0x1002 == 4098).
6516 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Reference-counted global unmap of persistently mapped memory. Only the
// first call (counter transitioning 0 -> 1) does work, and only on AMD
// hardware, where it unmaps HOST_VISIBLE | DEVICE_LOCAL dedicated allocations
// and mapped block vectors, plus every custom pool's block vector.
6518 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6520 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6522 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6524 for(uint32_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6526 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6527 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6528 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
// Dedicated allocations of this type.
6532 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6533 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6534 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6536 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6537 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(
this);
// Shared blocks of this type.
6543 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6544 pBlockVector->UnmapPersistentlyMappedMemory();
// Custom pools are unmapped regardless of memory type filtering above.
6551 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6552 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6554 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
// Counterpart of UnmapPersistentlyMappedMemory: when the counter returns to 0
// the memory is re-mapped (pools first, then dedicated allocations and block
// vectors of HOST_VISIBLE | DEVICE_LOCAL types, AMD only). The first map
// failure is remembered in finalResult but mapping continues for the rest.
6561 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6563 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6564 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6566 VkResult finalResult = VK_SUCCESS;
6567 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6571 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6572 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6574 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6578 for(uint32_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6580 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6581 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6582 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
// Re-map dedicated allocations of this type.
6586 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6587 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6588 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6590 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6591 hAlloc->OwnAllocMapPersistentlyMappedMemory(
this);
// Re-map shared blocks; keep the first error but continue.
6597 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6598 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6599 if(localResult != VK_SUCCESS)
6601 finalResult = localResult;
// Defragments the given allocations. Phase 1: distribute each eligible
// allocation (block-type, HOST_VISIBLE, not lost) to its owning block
// vector's defragmentator. Phase 2: run Defragment on every default block
// vector and custom pool within the move limits. Phase 3: destroy all
// defragmentators. Refuses to run while persistently mapped memory is
// globally unmapped.
6613 VkResult VmaAllocator_T::Defragment(
6614 VmaAllocation* pAllocations,
6615 size_t allocationCount,
6616 VkBool32* pAllocationsChanged,
// NOTE(review): extraction shows memset of sizeof(*ptr) — presumably the
// original clears allocationCount elements; verify against upstream.
6620 if(pAllocationsChanged != VMA_NULL)
6622 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6624 if(pDefragmentationStats != VMA_NULL)
6626 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6629 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6631 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6632 return VK_ERROR_MEMORY_MAP_FAILED;
6635 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6637 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6639 const size_t poolCount = m_Pools.size();
// Phase 1: register allocations with their block vector's defragmentator.
6642 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6644 VmaAllocation hAlloc = pAllocations[allocIndex];
6646 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6648 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6650 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6652 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6654 VmaBlockVector* pAllocBlockVector =
nullptr;
6656 const VmaPool hAllocPool = hAlloc->GetPool();
6658 if(hAllocPool != VK_NULL_HANDLE)
6660 pAllocBlockVector = &hAllocPool->GetBlockVector();
6665 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6668 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
6670 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6671 &pAllocationsChanged[allocIndex] : VMA_NULL;
6672 pDefragmentator->AddAllocation(hAlloc, pChanged);
6676 VkResult result = VK_SUCCESS;
// Move limits default to "unlimited" unless pDefragmentationInfo caps them.
6680 VkDeviceSize maxBytesToMove = SIZE_MAX;
6681 uint32_t maxAllocationsToMove = UINT32_MAX;
6682 if(pDefragmentationInfo != VMA_NULL)
// Phase 2: run defragmentation, stopping on first failure.
6689 for(uint32_t memTypeIndex = 0;
6690 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6694 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6696 for(uint32_t blockVectorType = 0;
6697 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6700 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6701 pDefragmentationStats,
6703 maxAllocationsToMove);
6709 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6711 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6712 pDefragmentationStats,
6714 maxAllocationsToMove);
// Phase 3: tear down defragmentators even if a Defragment call failed.
6720 for(
size_t poolIndex = poolCount; poolIndex--; )
6722 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6726 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6728 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6730 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6732 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
// Fills pAllocationInfo. For allocations that can become lost this performs a
// lock-free "touch": it loops (loop header lines missing from this view) on a
// compare-exchange of the last-use frame index, reporting zeroed memory info
// if the allocation is already lost, or full info once the frame index equals
// the current frame. Non-lost allocations are reported directly.
6740 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6742 if(hAllocation->CanBecomeLost())
6748 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6749 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report size and user data but no usable memory/offset.
6752 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6756 pAllocationInfo->
offset = 0;
6757 pAllocationInfo->
size = hAllocation->GetSize();
6759 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info.
6762 else if(localLastUseFrameIndex == localCurrFrameIndex)
6764 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6765 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6766 pAllocationInfo->
offset = hAllocation->GetOffset();
6767 pAllocationInfo->
size = hAllocation->GetSize();
6768 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6769 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Try to advance last-use frame index to the current frame (touch).
6774 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6776 localLastUseFrameIndex = localCurrFrameIndex;
// Simple path: allocation cannot be lost.
6784 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6785 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6786 pAllocationInfo->
offset = hAllocation->GetOffset();
6787 pAllocationInfo->
size = hAllocation->GetSize();
6788 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6789 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: allocates VmaPool_T, pre-creates its minimum block
// count (rolling back on failure), and registers the pool in the sorted
// m_Pools vector under the pools mutex.
6793 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6795 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6808 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6810 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6811 if(res != VK_SUCCESS)
6813 vma_delete(
this, *pPool);
6820 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6821 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters the pool from m_Pools (must be present) and deletes it.
6827 void VmaAllocator_T::DestroyPool(VmaPool pool)
6831 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6832 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6833 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6836 vma_delete(
this, pool);
// Forwards to the pool's block vector for statistics.
6839 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6841 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index.
6844 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6846 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible pool allocations as lost relative to the current frame.
6849 void VmaAllocator_T::MakePoolAllocationsLost(
6851 size_t* pLostAllocationCount)
6853 hPool->m_BlockVector.MakePoolAllocationsLost(
6854 m_CurrentFrameIndex.load(),
6855 pLostAllocationCount);
// Creates a placeholder allocation that is born lost (no backing memory).
6858 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6860 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6861 (*pAllocation)->InitLost();
// Central vkAllocateMemory wrapper. When the target heap has a user-imposed
// size limit, the remaining budget is checked and decremented under
// m_HeapSizeLimitMutex; otherwise memory is allocated directly. On success
// the user's pfnAllocate device-memory callback is invoked.
6864 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
6866 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
6869 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6871 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6872 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
6874 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6875 if(res == VK_SUCCESS)
// Charge the allocation against the heap budget.
6877 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exceeded: fail without calling Vulkan.
6882 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
6887 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
6890 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6892 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Central vkFreeMemory wrapper: fires the pfnFree callback first, frees the
// memory, then refunds the freed size to the heap budget if one is set.
6898 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
6900 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6902 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
6905 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
6907 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
6908 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
6910 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
6911 m_HeapSizeLimit[heapIndex] += size;
// Frees a dedicated ("own") allocation: removes it from the sorted per-type
// vector under its mutex, unmaps it if it was mapped, and releases the
// VkDeviceMemory via FreeVulkanMemory.
6915 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6917 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6919 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6921 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6922 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
6923 VMA_ASSERT(pOwnAllocations);
6924 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
6925 VMA_ASSERT(success);
6928 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistently mapped own allocations must be unmapped before freeing.
6930 if(allocation->GetMappedData() != VMA_NULL)
6932 vkUnmapMemory(m_hDevice, hMemory);
6935 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
6937 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes the detailed JSON map: "OwnAllocations" (dedicated allocations per
// type, with Size and Type per entry), "DefaultPools" (non-empty default
// block vectors), and "Pools" (custom pools). Section headers are emitted
// lazily, only once the first non-empty entry is found.
6940 #if VMA_STATS_STRING_ENABLED 6942 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
6944 bool ownAllocationsStarted =
false;
6945 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6947 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6948 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6950 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6951 VMA_ASSERT(pOwnAllocVector);
6952 if(pOwnAllocVector->empty() ==
false)
// Emit the "OwnAllocations" header only once, on first non-empty vector.
6954 if(ownAllocationsStarted ==
false)
6956 ownAllocationsStarted =
true;
6957 json.WriteString(
"OwnAllocations");
// Key is "Type <index>" with an optional " Mapped" suffix.
6961 json.BeginString(
"Type ");
6962 json.ContinueString(memTypeIndex);
6963 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6965 json.ContinueString(
" Mapped");
6971 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
6973 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
6974 json.BeginObject(
true);
6976 json.WriteString(
"Size");
6977 json.WriteNumber(hAlloc->GetSize());
6979 json.WriteString(
"Type");
6980 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
6989 if(ownAllocationsStarted)
// Default per-type block vectors, same lazy-header pattern.
6995 bool allocationsStarted =
false;
6996 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6998 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
7000 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
7002 if(allocationsStarted ==
false)
7004 allocationsStarted =
true;
7005 json.WriteString(
"DefaultPools");
7009 json.BeginString(
"Type ");
7010 json.ContinueString(memTypeIndex);
7011 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
7013 json.ContinueString(
" Mapped");
7017 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
7021 if(allocationsStarted)
// Custom pools section.
7028 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7029 const size_t poolCount = m_Pools.size();
7032 json.WriteString(
"Pools");
7034 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7036 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// File-scope helper shared by vmaAllocateMemoryForImage and vmaCreateImage:
// queries the image's memory requirements and forwards to
// VmaAllocator_T::AllocateMemory with the given suballocation type.
7043 #endif // #if VMA_STATS_STRING_ENABLED 7045 static VkResult AllocateMemoryForImage(
7046 VmaAllocator allocator,
7049 VmaSuballocationType suballocType,
7050 VmaAllocation* pAllocation)
7052 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7054 VkMemoryRequirements vkMemReq = {};
7055 (*allocator->GetVulkanFunctions().vkGetImageMemoryRequirements)(allocator->m_hDevice, image, &vkMemReq);
7057 return allocator->AllocateMemory(
7059 *pAllocationCreateInfo,
// Public entry point: creates the allocator object.
// NOTE(review): the function header and body lines of vmaCreateAllocator are
// mostly missing from this extracted view.
7069 VmaAllocator* pAllocator)
7071 VMA_ASSERT(pCreateInfo && pAllocator);
7072 VMA_DEBUG_LOG(
"vmaCreateAllocator");
// vmaDestroyAllocator: null-tolerant; copies the allocation callbacks onto the
// stack before deleting the allocator, since they live inside the object
// being destroyed.
7078 VmaAllocator allocator)
7080 if(allocator != VK_NULL_HANDLE)
7082 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7083 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7084 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer to the cached properties.
7089 VmaAllocator allocator,
7090 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7092 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7093 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: returns a pointer to the cached memory properties.
7097 VmaAllocator allocator,
7098 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7100 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7101 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: flags of one memory type; index is asserted.
7105 VmaAllocator allocator,
7106 uint32_t memoryTypeIndex,
7107 VkMemoryPropertyFlags* pFlags)
7109 VMA_ASSERT(allocator && pFlags);
7110 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7111 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: publishes the frame index used by lost-allocation
// logic; VMA_FRAME_INDEX_LOST is reserved and asserted against.
7115 VmaAllocator allocator,
7116 uint32_t frameIndex)
7118 VMA_ASSERT(allocator);
7119 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7121 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7123 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: thin wrapper over VmaAllocator_T::CalculateStats.
7127 VmaAllocator allocator,
7130 VMA_ASSERT(allocator && pStats);
7131 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7132 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics to a JSON string
// ("Total", then per-heap size/flags/stats with nested per-type entries;
// optional detailed map when detailedMap == VK_TRUE). The string is allocated
// with the allocator's callbacks and must be released via vmaFreeStatsString.
7135 #if VMA_STATS_STRING_ENABLED 7138 VmaAllocator allocator,
7139 char** ppStatsString,
7140 VkBool32 detailedMap)
7142 VMA_ASSERT(allocator && ppStatsString);
7143 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7145 VmaStringBuilder sb(allocator);
7147 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7151 allocator->CalculateStats(&stats);
7153 json.WriteString(
"Total");
7154 VmaPrintStatInfo(json, stats.
total);
// Per-heap section.
7156 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7158 json.BeginString(
"Heap ");
7159 json.ContinueString(heapIndex);
7163 json.WriteString(
"Size");
7164 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7166 json.WriteString(
"Flags");
7167 json.BeginArray(
true);
7168 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7170 json.WriteString(
"DEVICE_LOCAL");
7176 json.WriteString(
"Stats");
7177 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap.
7180 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7182 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7184 json.BeginString(
"Type ");
7185 json.ContinueString(typeIndex);
7190 json.WriteString(
"Flags");
7191 json.BeginArray(
true);
7192 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7193 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7195 json.WriteString(
"DEVICE_LOCAL");
7197 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7199 json.WriteString(
"HOST_VISIBLE");
7201 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7203 json.WriteString(
"HOST_COHERENT");
7205 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7207 json.WriteString(
"HOST_CACHED");
7209 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7211 json.WriteString(
"LAZILY_ALLOCATED");
7217 json.WriteString(
"Stats");
7218 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7227 if(detailedMap == VK_TRUE)
7229 allocator->PrintDetailedMap(json);
// Copy the built string into a caller-owned, NUL-terminated buffer.
7235 const size_t len = sb.GetLength();
7236 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7239 memcpy(pChars, sb.GetData(), len);
7242 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string from vmaBuildStatsString
// (null-tolerant; length recomputed with strlen, +1 for the terminator).
7246 VmaAllocator allocator,
7249 if(pStatsString != VMA_NULL)
7251 VMA_ASSERT(allocator);
7252 size_t len = strlen(pStatsString);
7253 vma_delete_array(allocator, pStatsString, len + 1);
// vmaFindMemoryTypeIndex: translates VmaAllocationCreateInfo (usage enum +
// required/preferred flags) into a concrete memory type index. Required flags
// must all be present; among candidates allowed by memoryTypeBits, the type
// missing the fewest preferred flags (lowest bit-count "cost") wins.
// NOTE(review): the switch cases' labels and the inner cost/minCost update
// lines are partially missing from this extracted view.
7257 #endif // #if VMA_STATS_STRING_ENABLED 7262 VmaAllocator allocator,
7263 uint32_t memoryTypeBits,
7265 uint32_t* pMemoryTypeIndex)
7267 VMA_ASSERT(allocator != VK_NULL_HANDLE);
7268 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
7269 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
7271 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
// Empty preferred set defaults to the required set.
7273 if(preferredFlags == 0)
7275 preferredFlags = requiredFlags;
7278 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
// Map the high-level usage enum onto concrete property flags.
7281 switch(pAllocationCreateInfo->
usage)
7286 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7289 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7292 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7293 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
7296 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7297 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7305 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Search allowed memory types for the lowest-cost match.
7308 *pMemoryTypeIndex = UINT32_MAX;
7309 uint32_t minCost = UINT32_MAX;
7310 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7311 memTypeIndex < allocator->GetMemoryTypeCount();
7312 ++memTypeIndex, memTypeBit <<= 1)
7315 if((memTypeBit & memoryTypeBits) != 0)
7317 const VkMemoryPropertyFlags currFlags =
7318 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7320 if((requiredFlags & ~currFlags) == 0)
// Cost = number of preferred flags absent from this type.
7323 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7325 if(currCost < minCost)
7327 *pMemoryTypeIndex = memTypeIndex;
7337 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// vmaCreatePool: validated wrapper over VmaAllocator_T::CreatePool.
7341 VmaAllocator allocator,
7345 VMA_ASSERT(allocator && pCreateInfo && pPool);
7347 VMA_DEBUG_LOG(
"vmaCreatePool");
7349 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7351 return allocator->CreatePool(pCreateInfo, pPool);
// vmaDestroyPool: validated wrapper over VmaAllocator_T::DestroyPool.
7355 VmaAllocator allocator,
7358 VMA_ASSERT(allocator && pool);
7360 VMA_DEBUG_LOG(
"vmaDestroyPool");
7362 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7364 allocator->DestroyPool(pool);
// vmaGetPoolStats: validated wrapper over VmaAllocator_T::GetPoolStats.
7368 VmaAllocator allocator,
7372 VMA_ASSERT(allocator && pool && pPoolStats);
7374 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7376 allocator->GetPoolStats(pool, pPoolStats);
// vmaMakePoolAllocationsLost: wrapper over MakePoolAllocationsLost.
7380 VmaAllocator allocator,
7382 size_t* pLostAllocationCount)
7384 VMA_ASSERT(allocator && pool);
7386 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7388 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// vmaAllocateMemory: allocates from caller-supplied VkMemoryRequirements with
// suballocation type UNKNOWN; optionally fills pAllocationInfo on success.
7392 VmaAllocator allocator,
7393 const VkMemoryRequirements* pVkMemoryRequirements,
7395 VmaAllocation* pAllocation,
7398 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7400 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7402 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7404 VkResult result = allocator->AllocateMemory(
7405 *pVkMemoryRequirements,
7407 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7410 if(pAllocationInfo && result == VK_SUCCESS)
7412 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForBuffer: queries the buffer's requirements itself and
// allocates with suballocation type BUFFER.
7419 VmaAllocator allocator,
7422 VmaAllocation* pAllocation,
7425 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7427 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7429 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7431 VkMemoryRequirements vkMemReq = {};
7432 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, buffer, &vkMemReq);
7434 VkResult result = allocator->AllocateMemory(
7437 VMA_SUBALLOCATION_TYPE_BUFFER,
7440 if(pAllocationInfo && result == VK_SUCCESS)
7442 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForImage: delegates to the static AllocateMemoryForImage
// helper with type IMAGE_UNKNOWN (tiling not known here).
7449 VmaAllocator allocator,
7452 VmaAllocation* pAllocation,
7455 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7457 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7459 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7461 VkResult result = AllocateMemoryForImage(
7465 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7468 if(pAllocationInfo && result == VK_SUCCESS)
7470 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaFreeMemory: wrapper over VmaAllocator_T::FreeMemory.
7477 VmaAllocator allocator,
7478 VmaAllocation allocation)
7480 VMA_ASSERT(allocator && allocation);
7482 VMA_DEBUG_LOG(
"vmaFreeMemory");
7484 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7486 allocator->FreeMemory(allocation);
// vmaGetAllocationInfo: wrapper over VmaAllocator_T::GetAllocationInfo.
7490 VmaAllocator allocator,
7491 VmaAllocation allocation,
7494 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7496 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7498 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// vmaSetAllocationUserData: stores an opaque user pointer on the allocation.
7502 VmaAllocator allocator,
7503 VmaAllocation allocation,
7506 VMA_ASSERT(allocator && allocation);
7508 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7510 allocation->SetUserData(pUserData);
// vmaCreateLostAllocation: creates a placeholder allocation born lost.
7514 VmaAllocator allocator,
7515 VmaAllocation* pAllocation)
7517 VMA_ASSERT(allocator && pAllocation);
7519 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7521 allocator->CreateLostAllocation(pAllocation);
// vmaMapMemory: maps exactly the allocation's [offset, offset+size) range.
// NOTE(review): this calls vkMapMemory on the whole VkDeviceMemory region of
// the allocation — mapping two allocations from the same block concurrently
// would violate Vulkan rules; presumably callers must not do that.
7525 VmaAllocator allocator,
7526 VmaAllocation allocation,
7529 VMA_ASSERT(allocator && allocation && ppData);
7531 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7533 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7534 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
// vmaUnmapMemory: unmaps the allocation's VkDeviceMemory.
7538 VmaAllocator allocator,
7539 VmaAllocation allocation)
7541 VMA_ASSERT(allocator && allocation);
7543 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7545 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
// vmaUnmapPersistentlyMappedMemory: wrapper (signature line missing here).
7550 VMA_ASSERT(allocator);
7552 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7554 allocator->UnmapPersistentlyMappedMemory();
// vmaMapPersistentlyMappedMemory: wrapper (signature line missing here).
7559 VMA_ASSERT(allocator);
7561 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7563 return allocator->MapPersistentlyMappedMemory();
// vmaDefragment: wrapper over VmaAllocator_T::Defragment.
7567 VmaAllocator allocator,
7568 VmaAllocation* pAllocations,
7569 size_t allocationCount,
7570 VkBool32* pAllocationsChanged,
7574 VMA_ASSERT(allocator && pAllocations);
7576 VMA_DEBUG_LOG(
"vmaDefragment");
7578 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7580 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
// vmaCreateBuffer: creates the VkBuffer, allocates matching memory, and binds
// them. On any failure the partial results (allocation, then buffer) are
// rolled back and the out-handles reset to VK_NULL_HANDLE.
7584 VmaAllocator allocator,
7585 const VkBufferCreateInfo* pBufferCreateInfo,
7588 VmaAllocation* pAllocation,
7591 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7593 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7595 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7597 *pBuffer = VK_NULL_HANDLE;
7598 *pAllocation = VK_NULL_HANDLE;
// 1. Create the buffer.
7601 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
7602 allocator->m_hDevice,
7604 allocator->GetAllocationCallbacks(),
// 2. Query requirements and allocate memory for it.
7609 VkMemoryRequirements vkMemReq = {};
7610 (*allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements)(allocator->m_hDevice, *pBuffer, &vkMemReq);
7613 res = allocator->AllocateMemory(
7615 *pAllocationCreateInfo,
7616 VMA_SUBALLOCATION_TYPE_BUFFER,
// 3. Bind buffer to the allocation's memory at its offset.
7621 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
7622 allocator->m_hDevice,
7624 (*pAllocation)->GetMemory(),
7625 (*pAllocation)->GetOffset());
7629 if(pAllocationInfo != VMA_NULL)
7631 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Rollback paths: free allocation, then destroy buffer.
7635 allocator->FreeMemory(*pAllocation);
7636 *pAllocation = VK_NULL_HANDLE;
7639 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7640 *pBuffer = VK_NULL_HANDLE;
// vmaDestroyBuffer: destroys the buffer and frees its allocation
// (no-op when buffer is VK_NULL_HANDLE).
7647 VmaAllocator allocator,
7649 VmaAllocation allocation)
7651 if(buffer != VK_NULL_HANDLE)
7653 VMA_ASSERT(allocator);
7655 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7657 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7659 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7661 allocator->FreeMemory(allocation);
// vmaCreateImage: same create/allocate/bind/rollback pattern as
// vmaCreateBuffer; suballocation type depends on the image tiling so that
// linear and optimal images are kept apart (buffer-image granularity).
7666 VmaAllocator allocator,
7667 const VkImageCreateInfo* pImageCreateInfo,
7670 VmaAllocation* pAllocation,
7673 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7675 VMA_DEBUG_LOG(
"vmaCreateImage");
7677 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7679 *pImage = VK_NULL_HANDLE;
7680 *pAllocation = VK_NULL_HANDLE;
7683 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
7684 allocator->m_hDevice,
7686 allocator->GetAllocationCallbacks(),
7690 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7691 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7692 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7695 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7699 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
7700 allocator->m_hDevice,
7702 (*pAllocation)->GetMemory(),
7703 (*pAllocation)->GetOffset());
7707 if(pAllocationInfo != VMA_NULL)
7709 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Rollback paths: free allocation, then destroy image.
7713 allocator->FreeMemory(*pAllocation);
7714 *pAllocation = VK_NULL_HANDLE;
7717 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7718 *pImage = VK_NULL_HANDLE;
// vmaDestroyImage: destroys the image and frees its allocation
// (no-op when image is VK_NULL_HANDLE).
7725 VmaAllocator allocator,
7727 VmaAllocation allocation)
7729 if(image != VK_NULL_HANDLE)
7731 VMA_ASSERT(allocator);
7733 VMA_DEBUG_LOG(
"vmaDestroyImage");
7735 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7737 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7739 allocator->FreeMemory(allocation);
7743 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:476
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:499
Definition: vk_mem_alloc.h:828
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:612
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:486
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:679
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:480
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:949
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1099
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:880
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:728
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:761
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:445
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:511
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:830
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave #define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:558
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:493
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:508
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:483
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:473
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1103
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:528
VmaStatInfo total
Definition: vk_mem_alloc.h:630
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1111
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:744
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1094
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:484
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:502
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:834
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:959
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:481
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:763
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:850
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:886
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:837
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
struct VmaVulkanFunctions VmaVulkanFunctions
Definition: vk_mem_alloc.h:737
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1089
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:621
Definition: vk_mem_alloc.h:808
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1107
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:482
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:626
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:717
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1109
VmaMemoryUsage
Definition: vk_mem_alloc.h:665
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:755
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:469
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:464
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:477
Definition: vk_mem_alloc.h:609
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:845
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:456
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:460
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:840
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:622
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:439
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:750
Definition: vk_mem_alloc.h:741
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:479
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:858
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:621
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:514
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:889
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:768
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:546
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:628
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:621
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:488
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:614
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:458
Definition: vk_mem_alloc.h:735
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:487
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:872
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:496
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:618
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:970
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:696
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:505
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:616
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:877
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:673
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
struct VmaStats VmaStats
General statistics from current state of Allocator.
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:622
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:954
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1105
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Definition: vk_mem_alloc.h:475
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:739
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:485
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:489
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:799
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this alloaction is not persistent...
Definition: vk_mem_alloc.h:965
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
No intended memory usage specified.
Definition: vk_mem_alloc.h:668
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:478
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
Definition: vk_mem_alloc.h:680
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:935
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:676
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:684
Definition: vk_mem_alloc.h:471
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:707
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:670
struct VmaStatInfo VmaStatInfo
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:620
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:629
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:883
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:826
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:622
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:940
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.