23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 404 #include <vulkan/vulkan.h> 411 VK_DEFINE_HANDLE(VmaAllocator)
415 VmaAllocator allocator,
417 VkDeviceMemory memory,
421 VmaAllocator allocator,
423 VkDeviceMemory memory,
492 VmaAllocator* pAllocator);
496 VmaAllocator allocator);
503 VmaAllocator allocator,
504 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
511 VmaAllocator allocator,
512 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
521 VmaAllocator allocator,
522 uint32_t memoryTypeIndex,
523 VkMemoryPropertyFlags* pFlags);
534 VmaAllocator allocator,
535 uint32_t frameIndex);
563 VmaAllocator allocator,
566 #define VMA_STATS_STRING_ENABLED 1 568 #if VMA_STATS_STRING_ENABLED 574 VmaAllocator allocator,
575 char** ppStatsString,
576 VkBool32 detailedMap);
579 VmaAllocator allocator,
582 #endif // #if VMA_STATS_STRING_ENABLED 591 VK_DEFINE_HANDLE(VmaPool)
714 VmaAllocator allocator,
715 uint32_t memoryTypeBits,
717 uint32_t* pMemoryTypeIndex);
827 VmaAllocator allocator,
834 VmaAllocator allocator,
844 VmaAllocator allocator,
855 VmaAllocator allocator,
857 size_t* pLostAllocationCount);
859 VK_DEFINE_HANDLE(VmaAllocation)
912 VmaAllocator allocator,
913 const VkMemoryRequirements* pVkMemoryRequirements,
915 VmaAllocation* pAllocation,
925 VmaAllocator allocator,
928 VmaAllocation* pAllocation,
933 VmaAllocator allocator,
936 VmaAllocation* pAllocation,
941 VmaAllocator allocator,
942 VmaAllocation allocation);
946 VmaAllocator allocator,
947 VmaAllocation allocation,
952 VmaAllocator allocator,
953 VmaAllocation allocation,
967 VmaAllocator allocator,
968 VmaAllocation* pAllocation);
979 VmaAllocator allocator,
980 VmaAllocation allocation,
984 VmaAllocator allocator,
985 VmaAllocation allocation);
1113 VmaAllocator allocator,
1114 VmaAllocation* pAllocations,
1115 size_t allocationCount,
1116 VkBool32* pAllocationsChanged,
1146 VmaAllocator allocator,
1147 const VkBufferCreateInfo* pBufferCreateInfo,
1150 VmaAllocation* pAllocation,
1154 VmaAllocator allocator,
1156 VmaAllocation allocation);
1160 VmaAllocator allocator,
1161 const VkImageCreateInfo* pImageCreateInfo,
1164 VmaAllocation* pAllocation,
1168 VmaAllocator allocator,
1170 VmaAllocation allocation);
1174 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1177 #ifdef __INTELLISENSE__ 1178 #define VMA_IMPLEMENTATION 1181 #ifdef VMA_IMPLEMENTATION 1182 #undef VMA_IMPLEMENTATION 1204 #if VMA_USE_STL_CONTAINERS 1205 #define VMA_USE_STL_VECTOR 1 1206 #define VMA_USE_STL_UNORDERED_MAP 1 1207 #define VMA_USE_STL_LIST 1 1210 #if VMA_USE_STL_VECTOR 1214 #if VMA_USE_STL_UNORDERED_MAP 1215 #include <unordered_map> 1218 #if VMA_USE_STL_LIST 1227 #include <algorithm> 1231 #if !defined(_WIN32) 1238 #define VMA_ASSERT(expr) assert(expr) 1240 #define VMA_ASSERT(expr) 1246 #ifndef VMA_HEAVY_ASSERT 1248 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1250 #define VMA_HEAVY_ASSERT(expr) 1256 #define VMA_NULL nullptr 1259 #ifndef VMA_ALIGN_OF 1260 #define VMA_ALIGN_OF(type) (__alignof(type)) 1263 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1265 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1267 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1271 #ifndef VMA_SYSTEM_FREE 1273 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1275 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1280 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1284 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1288 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1292 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1295 #ifndef VMA_DEBUG_LOG 1296 #define VMA_DEBUG_LOG(format, ...) 1306 #if VMA_STATS_STRING_ENABLED 1307 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1309 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned integer as decimal text into outStr
// (buffer of strLen bytes, always NUL-terminated by snprintf).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    // Cast to unsigned long long so the value always matches "%llu".
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value as text ("%p", implementation-defined format)
// into outStr (buffer of strLen bytes, NUL-terminated by snprintf).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
    // Acquires the wrapped mutex, blocking until it becomes available.
    void Lock() { m_Mutex.lock(); }
    // Releases the wrapped mutex. Caller must currently hold it.
    void Unlock() { m_Mutex.unlock(); }
1332 #define VMA_MUTEX VmaMutex 1343 #ifndef VMA_ATOMIC_UINT32 1344 #define VMA_ATOMIC_UINT32 std::atomic_uint32_t 1347 #ifndef VMA_BEST_FIT 1360 #define VMA_BEST_FIT (1) 1363 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 1368 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 1371 #ifndef VMA_DEBUG_ALIGNMENT 1376 #define VMA_DEBUG_ALIGNMENT (1) 1379 #ifndef VMA_DEBUG_MARGIN 1384 #define VMA_DEBUG_MARGIN (0) 1387 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1392 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1395 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1400 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1403 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1404 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1408 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1409 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1413 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1414 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1418 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null VkAllocationCallbacks instance, used where an empty (no-op)
// callbacks struct is needed in place of user-provided callbacks.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
// Uses Kernighan's method: each iteration clears the lowest set bit,
// so the loop runs once per set bit.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1; // Clear lowest set bit.
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align.
// align must be > 0; result is unchanged if val is already aligned.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T alignedUnits = (val + align - 1) / align;
    return alignedUnits * align;
}
// Division with rounding to nearest integer (ties round up for
// non-negative operands): x / y, rounded.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Lomuto-style partition step for VmaQuickSort. Uses the last element
// (end - 1) as the pivot, places all elements for which cmp(elem, pivot)
// holds before it, and returns the iterator to the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator store = beg;
    for(Iterator cur = beg; cur < pivot; ++cur)
    {
        if(cmp(*cur, *pivot))
        {
            if(store != cur)
            {
                VMA_SWAP(*cur, *store);
            }
            ++store;
        }
    }
    // Move the pivot into its final slot.
    if(store != pivot)
    {
        VMA_SWAP(*store, *pivot);
    }
    return store;
}
1478 template<
typename Iterator,
typename Compare>
1479 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1483 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1484 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1485 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1489 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1491 #endif // #ifndef VMA_SORT 1500 static inline bool VmaBlocksOnSamePage(
1501 VkDeviceSize resourceAOffset,
1502 VkDeviceSize resourceASize,
1503 VkDeviceSize resourceBOffset,
1504 VkDeviceSize pageSize)
1506 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1507 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1508 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1509 VkDeviceSize resourceBStart = resourceBOffset;
1510 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1511 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation within a device-memory block.
// Used by VmaIsBufferImageGranularityConflict to decide whether two
// neighboring suballocations may alias w.r.t. bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unoccupied range.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Unknown resource kind — treated conservatively.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with tiling not known.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1531 static inline bool VmaIsBufferImageGranularityConflict(
1532 VmaSuballocationType suballocType1,
1533 VmaSuballocationType suballocType2)
1535 if(suballocType1 > suballocType2)
1537 VMA_SWAP(suballocType1, suballocType2);
1540 switch(suballocType1)
1542 case VMA_SUBALLOCATION_TYPE_FREE:
1544 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1546 case VMA_SUBALLOCATION_TYPE_BUFFER:
1548 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1549 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1550 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1552 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1553 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1554 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1555 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1557 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1558 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1570 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1571 m_pMutex(useMutex ? &mutex : VMA_NULL)
1588 VMA_MUTEX* m_pMutex;
1591 #if VMA_DEBUG_GLOBAL_MUTEX 1592 static VMA_MUTEX gDebugGlobalMutex;
1593 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 1595 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1599 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over sorted range [beg, end): returns an iterator to the
first element for which cmp(element, key) is false (i.e. the first element
not less than key), or end if all elements are less. Equivalent to
std::lower_bound with a heterogeneous comparator.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1; // Element is less than key: answer lies to the right.
        }
        else
        {
            hi = mid;     // Element is not less: it is a candidate.
        }
    }
    return beg + lo;
}
1632 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1634 if((pAllocationCallbacks != VMA_NULL) &&
1635 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1637 return (*pAllocationCallbacks->pfnAllocation)(
1638 pAllocationCallbacks->pUserData,
1641 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1645 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1649 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1651 if((pAllocationCallbacks != VMA_NULL) &&
1652 (pAllocationCallbacks->pfnFree != VMA_NULL))
1654 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1658 VMA_SYSTEM_FREE(ptr);
1662 template<
typename T>
1663 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1665 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1668 template<
typename T>
1669 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1671 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1674 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1676 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1678 template<
typename T>
1679 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1682 VmaFree(pAllocationCallbacks, ptr);
1685 template<
typename T>
1686 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1690 for(
size_t i = count; i--; )
1694 VmaFree(pAllocationCallbacks, ptr);
1699 template<
typename T>
1700 class VmaStlAllocator
1703 const VkAllocationCallbacks*
const m_pCallbacks;
1704 typedef T value_type;
1706 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1707 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1709 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1710 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1712 template<
typename U>
1713 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1715 return m_pCallbacks == rhs.m_pCallbacks;
1717 template<
typename U>
1718 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1720 return m_pCallbacks != rhs.m_pCallbacks;
1723 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1726 #if VMA_USE_STL_VECTOR 1728 #define VmaVector std::vector 1730 template<
typename T,
typename allocatorT>
1731 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1733 vec.insert(vec.begin() + index, item);
// Removes the element at position index from a std::vector (index form,
// shared with the custom VmaVector implementation below).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
1742 #else // #if VMA_USE_STL_VECTOR 1747 template<
typename T,
typename AllocatorT>
1751 typedef T value_type;
1753 VmaVector(
const AllocatorT& allocator) :
1754 m_Allocator(allocator),
1761 VmaVector(
size_t count,
const AllocatorT& allocator) :
1762 m_Allocator(allocator),
1763 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
1769 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1770 m_Allocator(src.m_Allocator),
1771 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
1772 m_Count(src.m_Count),
1773 m_Capacity(src.m_Count)
1777 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1783 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1786 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1790 resize(rhs.m_Count);
1793 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1799 bool empty()
const {
return m_Count == 0; }
1800 size_t size()
const {
return m_Count; }
1801 T* data() {
return m_pArray; }
1802 const T* data()
const {
return m_pArray; }
1804 T& operator[](
size_t index)
1806 VMA_HEAVY_ASSERT(index < m_Count);
1807 return m_pArray[index];
1809 const T& operator[](
size_t index)
const 1811 VMA_HEAVY_ASSERT(index < m_Count);
1812 return m_pArray[index];
1817 VMA_HEAVY_ASSERT(m_Count > 0);
1820 const T& front()
const 1822 VMA_HEAVY_ASSERT(m_Count > 0);
1827 VMA_HEAVY_ASSERT(m_Count > 0);
1828 return m_pArray[m_Count - 1];
1830 const T& back()
const 1832 VMA_HEAVY_ASSERT(m_Count > 0);
1833 return m_pArray[m_Count - 1];
1836 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1838 newCapacity = VMA_MAX(newCapacity, m_Count);
1840 if((newCapacity < m_Capacity) && !freeMemory)
1842 newCapacity = m_Capacity;
1845 if(newCapacity != m_Capacity)
1847 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1850 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1852 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1853 m_Capacity = newCapacity;
1854 m_pArray = newArray;
1858 void resize(
size_t newCount,
bool freeMemory =
false)
1860 size_t newCapacity = m_Capacity;
1861 if(newCount > m_Capacity)
1863 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1867 newCapacity = newCount;
1870 if(newCapacity != m_Capacity)
1872 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1873 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1874 if(elementsToCopy != 0)
1876 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1878 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1879 m_Capacity = newCapacity;
1880 m_pArray = newArray;
1886 void clear(
bool freeMemory =
false)
1888 resize(0, freeMemory);
1891 void insert(
size_t index,
const T& src)
1893 VMA_HEAVY_ASSERT(index <= m_Count);
1894 const size_t oldCount = size();
1895 resize(oldCount + 1);
1896 if(index < oldCount)
1898 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1900 m_pArray[index] = src;
1903 void remove(
size_t index)
1905 VMA_HEAVY_ASSERT(index < m_Count);
1906 const size_t oldCount = size();
1907 if(index < oldCount - 1)
1909 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1911 resize(oldCount - 1);
1914 void push_back(
const T& src)
1916 const size_t newIndex = size();
1917 resize(newIndex + 1);
1918 m_pArray[newIndex] = src;
1923 VMA_HEAVY_ASSERT(m_Count > 0);
1927 void push_front(
const T& src)
1934 VMA_HEAVY_ASSERT(m_Count > 0);
1938 typedef T* iterator;
1940 iterator begin() {
return m_pArray; }
1941 iterator end() {
return m_pArray + m_Count; }
1944 AllocatorT m_Allocator;
1950 template<
typename T,
typename allocatorT>
1951 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
1953 vec.insert(index, item);
1956 template<
typename T,
typename allocatorT>
1957 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
1962 #endif // #if VMA_USE_STL_VECTOR 1964 template<
typename CmpLess,
typename VectorT>
1965 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
1967 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
1969 vector.data() + vector.size(),
1971 CmpLess()) - vector.data();
1972 VmaVectorInsert(vector, indexToInsert, value);
1973 return indexToInsert;
// Removes one element equal to value (w.r.t. CmpLess) from a sorted
// vector. Returns true if an element was found and removed.
//
// FIXES:
// - `typename` added before the dependent name VectorT::iterator — the
//   original was ill-formed C++ (accepted only by permissive compilers).
// - Search now uses begin()/end() consistently instead of mixing begin()
//   with data() + size(), which is only valid when iterators are raw
//   pointers.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Found iff not at end and equivalent under the ordering (neither
    // strictly less in either direction).
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Finds an element equal to value (w.r.t. CmpLess) in a sorted vector.
// Returns its index, or vector.size() if not found.
//
// FIXES:
// - The original compared an iterator against vector.size()
//   (`it != vector.size()`), which is a type error / wrong comparison;
//   the end-of-range pointer is compared instead.
// - Iteration is done through const element pointers (vector.data() on a
//   const vector yields const T*), restoring const correctness and
//   removing the missing-`typename` dependent-name problem.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const pBeg = vector.data();
    const typename VectorT::value_type* const pEnd = pBeg + vector.size();
    const typename VectorT::value_type* const it =
        VmaBinaryFindFirstNotLess(pBeg, pEnd, value, comparator);
    if((it != pEnd) && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - pBeg;
    }
    return vector.size();
}
2021 template<
typename T>
2022 class VmaPoolAllocator
2025 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2026 ~VmaPoolAllocator();
2034 uint32_t NextFreeIndex;
2041 uint32_t FirstFreeIndex;
2044 const VkAllocationCallbacks* m_pAllocationCallbacks;
2045 size_t m_ItemsPerBlock;
2046 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2048 ItemBlock& CreateNewBlock();
2051 template<
typename T>
2052 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2053 m_pAllocationCallbacks(pAllocationCallbacks),
2054 m_ItemsPerBlock(itemsPerBlock),
2055 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2057 VMA_ASSERT(itemsPerBlock > 0);
2060 template<
typename T>
2061 VmaPoolAllocator<T>::~VmaPoolAllocator()
2066 template<
typename T>
2067 void VmaPoolAllocator<T>::Clear()
2069 for(
size_t i = m_ItemBlocks.size(); i--; )
2070 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2071 m_ItemBlocks.clear();
2074 template<
typename T>
2075 T* VmaPoolAllocator<T>::Alloc()
2077 for(
size_t i = m_ItemBlocks.size(); i--; )
2079 ItemBlock& block = m_ItemBlocks[i];
2081 if(block.FirstFreeIndex != UINT32_MAX)
2083 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2084 block.FirstFreeIndex = pItem->NextFreeIndex;
2085 return &pItem->Value;
2090 ItemBlock& newBlock = CreateNewBlock();
2091 Item*
const pItem = &newBlock.pItems[0];
2092 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2093 return &pItem->Value;
2096 template<
typename T>
2097 void VmaPoolAllocator<T>::Free(T* ptr)
2100 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2102 ItemBlock& block = m_ItemBlocks[i];
2106 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2109 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2111 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2112 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2113 block.FirstFreeIndex = index;
2117 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2120 template<
typename T>
2121 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2123 ItemBlock newBlock = {
2124 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2126 m_ItemBlocks.push_back(newBlock);
2129 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2130 newBlock.pItems[i].NextFreeIndex = i + 1;
2131 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2132 return m_ItemBlocks.back();
2138 #if VMA_USE_STL_LIST 2140 #define VmaList std::list 2142 #else // #if VMA_USE_STL_LIST 2144 template<
typename T>
2153 template<
typename T>
2157 typedef VmaListItem<T> ItemType;
2159 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2163 size_t GetCount()
const {
return m_Count; }
2164 bool IsEmpty()
const {
return m_Count == 0; }
2166 ItemType* Front() {
return m_pFront; }
2167 const ItemType* Front()
const {
return m_pFront; }
2168 ItemType* Back() {
return m_pBack; }
2169 const ItemType* Back()
const {
return m_pBack; }
2171 ItemType* PushBack();
2172 ItemType* PushFront();
2173 ItemType* PushBack(
const T& value);
2174 ItemType* PushFront(
const T& value);
2179 ItemType* InsertBefore(ItemType* pItem);
2181 ItemType* InsertAfter(ItemType* pItem);
2183 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2184 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2186 void Remove(ItemType* pItem);
2189 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2190 VmaPoolAllocator<ItemType> m_ItemAllocator;
2196 VmaRawList(
const VmaRawList<T>& src);
2197 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2200 template<
typename T>
2201 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2202 m_pAllocationCallbacks(pAllocationCallbacks),
2203 m_ItemAllocator(pAllocationCallbacks, 128),
2210 template<
typename T>
2211 VmaRawList<T>::~VmaRawList()
2217 template<
typename T>
2218 void VmaRawList<T>::Clear()
2220 if(IsEmpty() ==
false)
2222 ItemType* pItem = m_pBack;
2223 while(pItem != VMA_NULL)
2225 ItemType*
const pPrevItem = pItem->pPrev;
2226 m_ItemAllocator.Free(pItem);
2229 m_pFront = VMA_NULL;
2235 template<
typename T>
2236 VmaListItem<T>* VmaRawList<T>::PushBack()
2238 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2239 pNewItem->pNext = VMA_NULL;
2242 pNewItem->pPrev = VMA_NULL;
2243 m_pFront = pNewItem;
2249 pNewItem->pPrev = m_pBack;
2250 m_pBack->pNext = pNewItem;
2257 template<
typename T>
2258 VmaListItem<T>* VmaRawList<T>::PushFront()
2260 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2261 pNewItem->pPrev = VMA_NULL;
2264 pNewItem->pNext = VMA_NULL;
2265 m_pFront = pNewItem;
2271 pNewItem->pNext = m_pFront;
2272 m_pFront->pPrev = pNewItem;
2273 m_pFront = pNewItem;
2279 template<
typename T>
2280 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2282 ItemType*
const pNewItem = PushBack();
2283 pNewItem->Value = value;
2287 template<
typename T>
2288 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2290 ItemType*
const pNewItem = PushFront();
2291 pNewItem->Value = value;
2295 template<
typename T>
2296 void VmaRawList<T>::PopBack()
2298 VMA_HEAVY_ASSERT(m_Count > 0);
2299 ItemType*
const pBackItem = m_pBack;
2300 ItemType*
const pPrevItem = pBackItem->pPrev;
2301 if(pPrevItem != VMA_NULL)
2303 pPrevItem->pNext = VMA_NULL;
2305 m_pBack = pPrevItem;
2306 m_ItemAllocator.Free(pBackItem);
2310 template<
typename T>
2311 void VmaRawList<T>::PopFront()
2313 VMA_HEAVY_ASSERT(m_Count > 0);
2314 ItemType*
const pFrontItem = m_pFront;
2315 ItemType*
const pNextItem = pFrontItem->pNext;
2316 if(pNextItem != VMA_NULL)
2318 pNextItem->pPrev = VMA_NULL;
2320 m_pFront = pNextItem;
2321 m_ItemAllocator.Free(pFrontItem);
2325 template<
typename T>
2326 void VmaRawList<T>::Remove(ItemType* pItem)
2328 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2329 VMA_HEAVY_ASSERT(m_Count > 0);
2331 if(pItem->pPrev != VMA_NULL)
2333 pItem->pPrev->pNext = pItem->pNext;
2337 VMA_HEAVY_ASSERT(m_pFront == pItem);
2338 m_pFront = pItem->pNext;
2341 if(pItem->pNext != VMA_NULL)
2343 pItem->pNext->pPrev = pItem->pPrev;
2347 VMA_HEAVY_ASSERT(m_pBack == pItem);
2348 m_pBack = pItem->pPrev;
2351 m_ItemAllocator.Free(pItem);
2355 template<
typename T>
2356 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2358 if(pItem != VMA_NULL)
2360 ItemType*
const prevItem = pItem->pPrev;
2361 ItemType*
const newItem = m_ItemAllocator.Alloc();
2362 newItem->pPrev = prevItem;
2363 newItem->pNext = pItem;
2364 pItem->pPrev = newItem;
2365 if(prevItem != VMA_NULL)
2367 prevItem->pNext = newItem;
2371 VMA_HEAVY_ASSERT(m_pFront == pItem);
2381 template<
typename T>
2382 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2384 if(pItem != VMA_NULL)
2386 ItemType*
const nextItem = pItem->pNext;
2387 ItemType*
const newItem = m_ItemAllocator.Alloc();
2388 newItem->pNext = nextItem;
2389 newItem->pPrev = pItem;
2390 pItem->pNext = newItem;
2391 if(nextItem != VMA_NULL)
2393 nextItem->pPrev = newItem;
2397 VMA_HEAVY_ASSERT(m_pBack == pItem);
2407 template<
typename T>
2408 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2410 ItemType*
const newItem = InsertBefore(pItem);
2411 newItem->Value = value;
2415 template<
typename T>
2416 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2418 ItemType*
const newItem = InsertAfter(pItem);
2419 newItem->Value = value;
2423 template<
typename T,
typename AllocatorT>
2436 T& operator*()
const 2438 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2439 return m_pItem->Value;
2441 T* operator->()
const 2443 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2444 return &m_pItem->Value;
2447 iterator& operator++()
2449 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2450 m_pItem = m_pItem->pNext;
2453 iterator& operator--()
2455 if(m_pItem != VMA_NULL)
2457 m_pItem = m_pItem->pPrev;
2461 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2462 m_pItem = m_pList->Back();
2467 iterator operator++(
int)
2469 iterator result = *
this;
2473 iterator operator--(
int)
2475 iterator result = *
this;
2480 bool operator==(
const iterator& rhs)
const 2482 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2483 return m_pItem == rhs.m_pItem;
2485 bool operator!=(
const iterator& rhs)
const 2487 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2488 return m_pItem != rhs.m_pItem;
2492 VmaRawList<T>* m_pList;
2493 VmaListItem<T>* m_pItem;
2495 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2501 friend class VmaList<T, AllocatorT>;
2504 class const_iterator
2513 const_iterator(
const iterator& src) :
2514 m_pList(src.m_pList),
2515 m_pItem(src.m_pItem)
2519 const T& operator*()
const 2521 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2522 return m_pItem->Value;
2524 const T* operator->()
const 2526 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2527 return &m_pItem->Value;
2530 const_iterator& operator++()
2532 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2533 m_pItem = m_pItem->pNext;
2536 const_iterator& operator--()
2538 if(m_pItem != VMA_NULL)
2540 m_pItem = m_pItem->pPrev;
2544 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2545 m_pItem = m_pList->Back();
2550 const_iterator operator++(
int)
2552 const_iterator result = *
this;
2556 const_iterator operator--(
int)
2558 const_iterator result = *
this;
2563 bool operator==(
const const_iterator& rhs)
const 2565 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2566 return m_pItem == rhs.m_pItem;
2568 bool operator!=(
const const_iterator& rhs)
const 2570 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2571 return m_pItem != rhs.m_pItem;
2575 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2581 const VmaRawList<T>* m_pList;
2582 const VmaListItem<T>* m_pItem;
2584 friend class VmaList<T, AllocatorT>;
2587 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2589 bool empty()
const {
return m_RawList.IsEmpty(); }
2590 size_t size()
const {
return m_RawList.GetCount(); }
2592 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2593 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2595 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2596 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2598 void clear() { m_RawList.Clear(); }
2599 void push_back(
const T& value) { m_RawList.PushBack(value); }
2600 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2601 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2604 VmaRawList<T> m_RawList;
2607 #endif // #if VMA_USE_STL_LIST 2615 #if VMA_USE_STL_UNORDERED_MAP 2617 #define VmaPair std::pair 2619 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2620 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2622 #else // #if VMA_USE_STL_UNORDERED_MAP 2624 template<
typename T1,
typename T2>
2630 VmaPair() : first(), second() { }
2631 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2637 template<
typename KeyT,
typename ValueT>
2641 typedef VmaPair<KeyT, ValueT> PairType;
2642 typedef PairType* iterator;
2644 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2646 iterator begin() {
return m_Vector.begin(); }
2647 iterator end() {
return m_Vector.end(); }
2649 void insert(
const PairType& pair);
2650 iterator find(
const KeyT& key);
2651 void erase(iterator it);
2654 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2657 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2659 template<
typename FirstT,
typename SecondT>
2660 struct VmaPairFirstLess
2662 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2664 return lhs.first < rhs.first;
2666 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2668 return lhs.first < rhsFirst;
2672 template<
typename KeyT,
typename ValueT>
2673 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2675 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2677 m_Vector.data() + m_Vector.size(),
2679 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2680 VmaVectorInsert(m_Vector, indexToInsert, pair);
2683 template<
typename KeyT,
typename ValueT>
2684 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2686 PairType* it = VmaBinaryFindFirstNotLess(
2688 m_Vector.data() + m_Vector.size(),
2690 VmaPairFirstLess<KeyT, ValueT>());
2691 if((it != m_Vector.end()) && (it->first == key))
2697 return m_Vector.end();
2701 template<
typename KeyT,
typename ValueT>
2702 void VmaMap<KeyT, ValueT>::erase(iterator it)
2704 VmaVectorRemove(m_Vector, it - m_Vector.begin());
2707 #endif // #if VMA_USE_STL_UNORDERED_MAP 2713 class VmaDeviceMemoryBlock;
// Selector between the two per-memory-type vectors of device-memory
// blocks: one for persistently mapped blocks, one for unmapped blocks.
enum VMA_BLOCK_VECTOR_TYPE
{
    VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
    VMA_BLOCK_VECTOR_TYPE_MAPPED,
    VMA_BLOCK_VECTOR_TYPE_COUNT // Number of entries; not a real value.
};
2725 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2726 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2729 struct VmaAllocation_T
2732 enum ALLOCATION_TYPE
2734 ALLOCATION_TYPE_NONE,
2735 ALLOCATION_TYPE_BLOCK,
2736 ALLOCATION_TYPE_OWN,
2739 VmaAllocation_T(uint32_t currentFrameIndex) :
2742 m_pUserData(VMA_NULL),
2743 m_Type(ALLOCATION_TYPE_NONE),
2744 m_SuballocationType(VMA_SUBALLOCATION_TYPE_UNKNOWN),
2745 m_LastUseFrameIndex(currentFrameIndex)
2749 void InitBlockAllocation(
2751 VmaDeviceMemoryBlock* block,
2752 VkDeviceSize offset,
2753 VkDeviceSize alignment,
2755 VmaSuballocationType suballocationType,
2759 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2760 VMA_ASSERT(block != VMA_NULL);
2761 m_Type = ALLOCATION_TYPE_BLOCK;
2762 m_Alignment = alignment;
2764 m_pUserData = pUserData;
2765 m_SuballocationType = suballocationType;
2766 m_BlockAllocation.m_hPool = hPool;
2767 m_BlockAllocation.m_Block = block;
2768 m_BlockAllocation.m_Offset = offset;
2769 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
2774 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2775 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
2776 m_Type = ALLOCATION_TYPE_BLOCK;
2777 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
2778 m_BlockAllocation.m_Block = VMA_NULL;
2779 m_BlockAllocation.m_Offset = 0;
2780 m_BlockAllocation.m_CanBecomeLost =
true;
2783 void ChangeBlockAllocation(
2784 VmaDeviceMemoryBlock* block,
2785 VkDeviceSize offset)
2787 VMA_ASSERT(block != VMA_NULL);
2788 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2789 m_BlockAllocation.m_Block = block;
2790 m_BlockAllocation.m_Offset = offset;
2793 void InitOwnAllocation(
2794 uint32_t memoryTypeIndex,
2795 VkDeviceMemory hMemory,
2796 VmaSuballocationType suballocationType,
2802 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2803 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2804 m_Type = ALLOCATION_TYPE_OWN;
2807 m_pUserData = pUserData;
2808 m_SuballocationType = suballocationType;
2809 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2810 m_OwnAllocation.m_hMemory = hMemory;
2811 m_OwnAllocation.m_PersistentMap = persistentMap;
2812 m_OwnAllocation.m_pMappedData = pMappedData;
// Trivial inline accessors for the allocation's bookkeeping fields.
2815 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2816 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2817 VkDeviceSize GetSize()
const {
return m_Size; }
2818 void* GetUserData()
const {
return m_pUserData; }
2819 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2820 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
// Only valid for BLOCK-type allocations (asserts otherwise).
2822 VmaDeviceMemoryBlock* GetBlock()
const 2824 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2825 return m_BlockAllocation.m_Block;
// The following accessors switch on m_Type and are defined out of line
// further down in the file.
2827 VkDeviceSize GetOffset()
const;
2828 VkDeviceMemory GetMemory()
const;
2829 uint32_t GetMemoryTypeIndex()
const;
2830 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2831 void* GetMappedData()
const;
2832 bool CanBecomeLost()
const;
2833 VmaPool GetPool()
const;
// Re-maps the dedicated memory of an "own" allocation if it was created
// persistently mapped; the mapped pointer is cached in
// m_OwnAllocation.m_pMappedData.
2835 VkResult OwnAllocMapPersistentlyMappedMemory(VkDevice hDevice)
2837 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2838 if(m_OwnAllocation.m_PersistentMap)
2840 return vkMapMemory(hDevice, m_OwnAllocation.m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_OwnAllocation.m_pMappedData);
// Counterpart: unmaps the dedicated memory and clears the cached pointer.
2844 void OwnAllocUnmapPersistentlyMappedMemory(VkDevice hDevice)
2846 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2847 if(m_OwnAllocation.m_pMappedData)
2849 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
2850 vkUnmapMemory(hDevice, m_OwnAllocation.m_hMemory);
2851 m_OwnAllocation.m_pMappedData = VMA_NULL;
// Atomic read of the last frame index this allocation was used in.
2855 uint32_t GetLastUseFrameIndex()
const 2857 return m_LastUseFrameIndex.load();
// Weak CAS on the last-use frame index; used to touch the allocation or
// to mark it lost when several threads may race on it.
2859 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
2861 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
// Tries to mark this allocation lost; defined out of line below.
2871 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Data members of VmaAllocation_T. m_LastUseFrameIndex is atomic because
// it is read/CAS-ed concurrently (see the helpers above).
2874 VkDeviceSize m_Alignment;
2875 VkDeviceSize m_Size;
2877 ALLOCATION_TYPE m_Type;
2878 VmaSuballocationType m_SuballocationType;
2879 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
// State used when the allocation lives inside a VmaDeviceMemoryBlock.
// NOTE(review): the m_hPool member assigned by InitBlockAllocation is on
// a dropped original line (2883-2884) -- confirm against the full source.
2882 struct BlockAllocation
2885 VmaDeviceMemoryBlock* m_Block;
2886 VkDeviceSize m_Offset;
2887 bool m_CanBecomeLost;
// State used when the allocation owns its VkDeviceMemory exclusively.
2891 struct OwnAllocation
2893 uint32_t m_MemoryTypeIndex;
2894 VkDeviceMemory m_hMemory;
2895 bool m_PersistentMap;
2896 void* m_pMappedData;
// Exactly one of the two is meaningful, selected by m_Type.
2902 BlockAllocation m_BlockAllocation;
2904 OwnAllocation m_OwnAllocation;
// One entry in a memory block's suballocation list (free or used range).
// NOTE(review): the size member (original line 2915) was dropped by the
// extraction; code below (e.g. Validate) reads subAlloc.size.
2912 struct VmaSuballocation
2914 VkDeviceSize offset;
2916 VmaAllocation hAllocation;
2917 VmaSuballocationType type;
2920 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Fixed cost added per allocation that must be made lost to satisfy a
// request (see VmaAllocationRequest::CalcCost).
2923 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of searching a block for a place to put a new allocation.
2938 struct VmaAllocationRequest
2940 VkDeviceSize offset;
2941 VkDeviceSize sumFreeSize;
2942 VkDeviceSize sumItemSize;
2943 VmaSuballocationList::iterator item;
2944 size_t itemsToMakeLostCount;
// Lower cost is better: bytes of live allocations sacrificed plus a
// fixed penalty per allocation that would be made lost.
2946 VkDeviceSize CalcCost()
const 2948 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// One VkDeviceMemory block subdivided into a list of suballocations,
// with a size-sorted index of free ranges for best-fit search.
2958 class VmaDeviceMemoryBlock
2961 uint32_t m_MemoryTypeIndex;
2962 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
2963 VkDeviceMemory m_hMemory;
2964 VkDeviceSize m_Size;
2965 bool m_PersistentMap;
2966 void* m_pMappedData;
2967 uint32_t m_FreeCount;
2968 VkDeviceSize m_SumFreeSize;
2969 VmaSuballocationList m_Suballocations;
// Free suballocations sorted by size; only ranges of at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are registered here.
2972 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
2974 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor only checks the block was Destroy()-ed first.
2976 ~VmaDeviceMemoryBlock()
2978 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init takes ownership of an already-allocated VkDeviceMemory; some of
// its parameters are on dropped original lines (2982, 2987-2988).
2983 uint32_t newMemoryTypeIndex,
2984 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
2985 VkDeviceMemory newMemory,
2986 VkDeviceSize newSize,
2990 void Destroy(VmaAllocator allocator);
2993 bool Validate()
const;
// Finds a place for an allocation; may propose making others lost.
2998 bool CreateAllocationRequest(
2999 uint32_t currentFrameIndex,
3000 uint32_t frameInUseCount,
3001 VkDeviceSize bufferImageGranularity,
3002 VkDeviceSize allocSize,
3003 VkDeviceSize allocAlignment,
3004 VmaSuballocationType allocType,
3005 bool canMakeOtherLost,
3006 VmaAllocationRequest* pAllocationRequest);
3008 bool MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest);
3010 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3013 bool IsEmpty()
const;
// Alloc() commits a previously computed request; its first line(s) are
// on dropped originals (3016-3017).
3018 const VmaAllocationRequest& request,
3019 VmaSuballocationType type,
3020 VkDeviceSize allocSize,
3021 VmaAllocation hAllocation);
3024 void Free(
const VmaAllocation allocation);
3026 #if VMA_STATS_STRING_ENABLED 3027 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Core fit test used by CreateAllocationRequest (defined out of line).
3033 bool CheckAllocation(
3034 uint32_t currentFrameIndex,
3035 uint32_t frameInUseCount,
3036 VkDeviceSize bufferImageGranularity,
3037 VkDeviceSize allocSize,
3038 VkDeviceSize allocAlignment,
3039 VmaSuballocationType allocType,
3040 VmaSuballocationList::const_iterator suballocItem,
3041 bool canMakeOtherLost,
3042 VkDeviceSize* pOffset,
3043 size_t* itemsToMakeLostCount,
3044 VkDeviceSize* pSumFreeSize,
3045 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
3048 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3052 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3055 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3058 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3060 bool ValidateFreeSuballocationList()
const;
// Strict-weak ordering on raw pointers, usable as a container comparator.
// NOTE(review): the operator body (original ~3067-3069) was dropped by
// the extraction; line 3071 is an unrelated forward declaration fused
// onto the same extracted line.
3063 struct VmaPointerLess
3065 bool operator()(
const void* lhs,
const void* rhs)
const 3071 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock for one memory type / block-vector
// type; grows between minBlockCount and maxBlockCount blocks. Backs both
// default memory heaps and custom pools (m_IsCustomPool).
3079 struct VmaBlockVector
// Constructor parameters (its name line was dropped by the extraction).
3082 VmaAllocator hAllocator,
3083 uint32_t memoryTypeIndex,
3084 VMA_BLOCK_VECTOR_TYPE blockVectorType,
3085 VkDeviceSize preferredBlockSize,
3086 size_t minBlockCount,
3087 size_t maxBlockCount,
3088 VkDeviceSize bufferImageGranularity,
3089 uint32_t frameInUseCount,
3093 VkResult CreateMinBlocks();
// Trivial accessors.
3095 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3096 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3097 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3098 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3099 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const {
return m_BlockVectorType; }
3103 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate(...) -- its leading lines were dropped by the extraction.
3106 VmaPool hCurrentPool,
3107 uint32_t currentFrameIndex,
3108 const VkMemoryRequirements& vkMemReq,
3110 VmaSuballocationType suballocType,
3111 VmaAllocation* pAllocation);
// Free(...) -- its leading line was dropped by the extraction.
3114 VmaAllocation hAllocation);
3117 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex);
3119 #if VMA_STATS_STRING_ENABLED 3120 void PrintDetailedMap(
class VmaJsonWriter& json);
3123 void UnmapPersistentlyMappedMemory();
3124 VkResult MapPersistentlyMappedMemory();
3126 void MakePoolAllocationsLost(
3127 uint32_t currentFrameIndex,
3128 size_t* pLostAllocationCount);
3130 VmaDefragmentator* EnsureDefragmentator(
3132 const VkAllocationCallbacks* pAllocationCallbacks,
3133 uint32_t currentFrameIndex);
3135 VkResult Defragment(
3137 VkDeviceSize& maxBytesToMove,
3138 uint32_t& maxAllocationsToMove);
3140 void DestroyDefragmentator();
3143 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
3145 const VmaAllocator m_hAllocator;
3146 const uint32_t m_MemoryTypeIndex;
3147 const VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3148 const VkDeviceSize m_PreferredBlockSize;
3149 const size_t m_MinBlockCount;
3150 const size_t m_MaxBlockCount;
3151 const VkDeviceSize m_BufferImageGranularity;
3152 const uint32_t m_FrameInUseCount;
3153 const bool m_IsCustomPool;
3156 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3160 bool m_HasEmptyBlock;
3161 VmaDefragmentator* m_pDefragmentator;
3164 void Remove(VmaDeviceMemoryBlock* pBlock);
3168 void IncrementallySortBlocks();
3170 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Fragment of the pool object (its declaration line, presumably
// `struct VmaPool_T`, was dropped by the extraction): a pool is a thin
// wrapper around a single VmaBlockVector.
3176 VmaBlockVector m_BlockVector;
3180 VmaAllocator hAllocator,
3184 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact
// memory, bounded by bytes-moved / allocations-moved budgets.
3186 #if VMA_STATS_STRING_ENABLED 3191 class VmaDefragmentator
3193 const VkDevice m_hDevice;
3194 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3195 VmaBlockVector*
const m_pBlockVector;
3196 uint32_t m_CurrentFrameIndex;
3197 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3198 VkDeviceSize m_BytesMoved;
3199 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged is the
// caller's out-flag set when the allocation actually moves.
3201 struct AllocationInfo
3203 VmaAllocation m_hAllocation;
3204 VkBool32* m_pChanged;
3207 m_hAllocation(VK_NULL_HANDLE),
3208 m_pChanged(VMA_NULL)
// Orders AllocationInfo by descending allocation size.
3213 struct AllocationInfoSizeGreater
3215 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3217 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3222 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
3226 VmaDeviceMemoryBlock* m_pBlock;
3227 bool m_HasNonMovableAllocations;
3228 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3230 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3232 m_HasNonMovableAllocations(true),
3233 m_Allocations(pAllocationCallbacks),
3234 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when it holds more live
// allocations than were registered for defragmentation.
3238 void CalcHasNonMovableAllocations()
3240 const size_t blockAllocCount =
3241 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3242 const size_t defragmentAllocCount = m_Allocations.size();
3243 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// NOTE(review): "Descecnding" is a typo in the original identifier;
// kept as-is here because renaming is out of scope for a doc pass.
3246 void SortAllocationsBySizeDescecnding()
3248 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Returns a CPU pointer to the block's memory, reusing an existing
// persistent mapping when available, otherwise mapping temporarily.
3251 VkResult EnsureMapping(VkDevice hDevice,
void** ppMappedData)
3254 if(m_pMappedDataForDefragmentation)
3256 *ppMappedData = m_pMappedDataForDefragmentation;
3261 if(m_pBlock->m_PersistentMap)
3263 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
3264 *ppMappedData = m_pBlock->m_pMappedData;
3269 VkResult res = vkMapMemory(hDevice, m_pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_pMappedDataForDefragmentation);
3270 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmaps only if we created a temporary mapping ourselves.
3274 void Unmap(VkDevice hDevice)
3276 if(m_pMappedDataForDefragmentation != VMA_NULL)
3278 vkUnmapMemory(hDevice, m_pBlock->m_hMemory);
3284 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators for binary search over BlockInfo*.
3287 struct BlockPointerLess
3289 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3291 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3293 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3295 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Orders candidate destination blocks: fully-movable blocks first, then
// by free space (return statements are on dropped original lines).
3301 struct BlockInfoCompareMoveDestination
3303 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3305 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3309 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3313 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3321 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3322 BlockInfoVector m_Blocks;
3324 VkResult DefragmentRound(
3325 VkDeviceSize maxBytesToMove,
3326 uint32_t maxAllocationsToMove);
3328 static bool MoveMakesSense(
3329 size_t dstBlockIndex, VkDeviceSize dstOffset,
3330 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Public interface (constructor's name line is on a dropped original).
3335 const VkAllocationCallbacks* pAllocationCallbacks,
3336 VmaBlockVector* pBlockVector,
3337 uint32_t currentFrameIndex);
3339 ~VmaDefragmentator();
3341 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3342 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3344 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3346 VkResult Defragment(
3347 VkDeviceSize maxBytesToMove,
3348 uint32_t maxAllocationsToMove);
// Main allocator object behind the VmaAllocator handle: owns per-memory-
// type block vectors, own-allocation lists, and custom pools.
3352 struct VmaAllocator_T
3356 bool m_AllocationCallbacksSpecified;
3357 VkAllocationCallbacks m_AllocationCallbacks;
// Nonzero while persistently mapped memory is globally unmapped.
3361 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
3363 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3364 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors: [memory type][mapped/unmapped].
3366 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3369 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3370 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
3371 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied at creation, else null.
3376 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3378 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
// Effective granularity: device limit clamped up by the debug minimum
// (the VMA_MAX wrapper line was dropped by the extraction).
3381 VkDeviceSize GetBufferImageGranularity()
const 3384 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3385 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3388 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3389 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
// Main allocation entry point (a parameter line was dropped).
3392 VkResult AllocateMemory(
3393 const VkMemoryRequirements& vkMemReq,
3395 VmaSuballocationType suballocType,
3396 VmaAllocation* pAllocation);
3399 void FreeMemory(
const VmaAllocation allocation);
3401 void CalculateStats(
VmaStats* pStats);
3403 #if VMA_STATS_STRING_ENABLED 3404 void PrintDetailedMap(
class VmaJsonWriter& json);
3407 void UnmapPersistentlyMappedMemory();
3408 VkResult MapPersistentlyMappedMemory();
3410 VkResult Defragment(
3411 VmaAllocation* pAllocations,
3412 size_t allocationCount,
3413 VkBool32* pAllocationsChanged,
3417 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3420 void DestroyPool(VmaPool pool);
3421 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3423 void SetCurrentFrameIndex(uint32_t frameIndex);
3425 void MakePoolAllocationsLost(
3427 size_t* pLostAllocationCount);
3429 void CreateLostAllocation(VmaAllocation* pAllocation);
3432 VkDeviceSize m_PreferredLargeHeapBlockSize;
3433 VkDeviceSize m_PreferredSmallHeapBlockSize;
3435 VkPhysicalDevice m_PhysicalDevice;
3436 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Guards m_Pools (list of user-created custom pools).
3438 VMA_MUTEX m_PoolsMutex;
3440 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3442 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Internal allocation paths (some parameter lines were dropped).
3444 VkResult AllocateMemoryOfType(
3445 const VkMemoryRequirements& vkMemReq,
3447 uint32_t memTypeIndex,
3448 VmaSuballocationType suballocType,
3449 VmaAllocation* pAllocation);
3452 VkResult AllocateOwnMemory(
3454 VmaSuballocationType suballocType,
3455 uint32_t memTypeIndex,
3458 VmaAllocation* pAllocation);
3461 void FreeOwnMemory(VmaAllocation allocation);
// CPU-side allocation helpers that route through the allocator's
// VkAllocationCallbacks (overloads of the lower-level VmaMalloc/VmaFree).
3467 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3469 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3472 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3474 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object / array allocation.
3477 template<
typename T>
3478 static T* VmaAllocate(VmaAllocator hAllocator)
3480 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3483 template<
typename T>
3484 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3486 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// NOTE(review): the explicit destructor calls that normally precede the
// VmaFree in vma_delete / vma_delete_array are on dropped original lines
// (3493-3494, 3505) -- confirm against the full source.
3489 template<
typename T>
3490 static void vma_delete(VmaAllocator hAllocator, T* ptr)
3495 VmaFree(hAllocator, ptr);
3499 template<
typename T>
3500 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
3504 for(
size_t i = count; i--; )
3506 VmaFree(hAllocator, ptr);
// Growable character buffer used to build the statistics string; backed
// by a VmaVector using the allocator's CPU allocation callbacks.
3513 #if VMA_STATS_STRING_ENABLED 3515 class VmaStringBuilder
3518 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
3519 size_t GetLength()
const {
return m_Data.size(); }
// NOTE: the buffer is not null-terminated; use GetLength().
3520 const char* GetData()
const {
return m_Data.data(); }
3522 void Add(
char ch) { m_Data.push_back(ch); }
3523 void Add(
const char* pStr);
3524 void AddNewLine() { Add(
'\n'); }
3525 void AddNumber(uint32_t num);
3526 void AddNumber(uint64_t num);
3527 void AddPointer(
const void* ptr);
3530 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by growing the buffer and memcpy-ing the bytes
// (no null terminator is stored).
3533 void VmaStringBuilder::Add(
const char* pStr)
3535 const size_t strLen = strlen(pStr);
3538 const size_t oldCount = m_Data.size();
3539 m_Data.resize(oldCount + strLen);
3540 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting: each converts into a stack buffer via a
// Vma*ToStr helper; the buffer declaration and the final Add(buf) are on
// dropped original lines.
3544 void VmaStringBuilder::AddNumber(uint32_t num)
3547 VmaUint32ToStr(buf,
sizeof(buf), num);
3551 void VmaStringBuilder::AddNumber(uint64_t num)
3554 VmaUint64ToStr(buf,
sizeof(buf), num);
3558 void VmaStringBuilder::AddPointer(
const void* ptr)
3561 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Minimal streaming JSON writer over a VmaStringBuilder. Objects expect
// alternating key/value calls; a stack of StackItem tracks nesting.
3565 #endif // #if VMA_STATS_STRING_ENABLED 3570 #if VMA_STATS_STRING_ENABLED 3575 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
3578 void BeginObject(
bool singleLine =
false);
3581 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
3584 void WriteString(
const char* pStr);
3585 void BeginString(
const char* pStr = VMA_NULL);
3586 void ContinueString(
const char* pStr);
3587 void ContinueString(uint32_t n);
3588 void ContinueString(uint64_t n);
3589 void EndString(
const char* pStr = VMA_NULL);
3591 void WriteNumber(uint32_t n);
3592 void WriteNumber(uint64_t n);
3593 void WriteBool(
bool b);
3597 static const char*
const INDENT;
3599 enum COLLECTION_TYPE
3601 COLLECTION_TYPE_OBJECT,
3602 COLLECTION_TYPE_ARRAY,
// Per-nesting-level state (struct StackItem; its declaration line was
// dropped by the extraction).
3606 COLLECTION_TYPE type;
3607 uint32_t valueCount;
3608 bool singleLineMode;
3611 VmaStringBuilder& m_SB;
3612 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
3613 bool m_InsideString;
3615 void BeginValue(
bool isString);
3616 void WriteIndent(
bool oneLess =
false);
// One indentation step per nesting level.
3619 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: binds the writer to an existing string builder; the stack
// uses the caller's CPU allocation callbacks.
3621 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
3623 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
3624 m_InsideString(false)
// Destructor asserts the document was fully closed.
3628 VmaJsonWriter::~VmaJsonWriter()
3630 VMA_ASSERT(!m_InsideString);
3631 VMA_ASSERT(m_Stack.empty());
// Begin/End of objects and arrays push/pop a StackItem; the emission of
// the actual '{', '}', '[', ']' characters is on dropped original lines.
3634 void VmaJsonWriter::BeginObject(
bool singleLine)
3636 VMA_ASSERT(!m_InsideString);
3642 item.type = COLLECTION_TYPE_OBJECT;
3643 item.valueCount = 0;
3644 item.singleLineMode = singleLine;
3645 m_Stack.push_back(item);
3648 void VmaJsonWriter::EndObject()
3650 VMA_ASSERT(!m_InsideString);
3655 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
3659 void VmaJsonWriter::BeginArray(
bool singleLine)
3661 VMA_ASSERT(!m_InsideString);
3667 item.type = COLLECTION_TYPE_ARRAY;
3668 item.valueCount = 0;
3669 item.singleLineMode = singleLine;
3670 m_Stack.push_back(item);
3673 void VmaJsonWriter::EndArray()
3675 VMA_ASSERT(!m_InsideString);
3680 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
3684 void VmaJsonWriter::WriteString(
const char* pStr)
// String values are written piecewise: BeginString / ContinueString* /
// EndString, with escaping applied inside ContinueString.
3690 void VmaJsonWriter::BeginString(
const char* pStr)
3692 VMA_ASSERT(!m_InsideString);
3696 m_InsideString =
true;
3697 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3699 ContinueString(pStr);
3703 void VmaJsonWriter::ContinueString(
const char* pStr)
3705 VMA_ASSERT(m_InsideString);
// Per-character escaping loop; the switch on each character is on
// dropped original lines (3709-3734).
3707 const size_t strLen = strlen(pStr);
3708 for(
size_t i = 0; i < strLen; ++i)
3735 VMA_ASSERT(0 &&
"Character not currently supported.");
3741 void VmaJsonWriter::ContinueString(uint32_t n)
3743 VMA_ASSERT(m_InsideString);
3747 void VmaJsonWriter::ContinueString(uint64_t n)
3749 VMA_ASSERT(m_InsideString);
3753 void VmaJsonWriter::EndString(
const char* pStr)
3755 VMA_ASSERT(m_InsideString);
3756 if(pStr != VMA_NULL && pStr[0] !=
'\0')
3758 ContinueString(pStr);
3761 m_InsideString =
false;
3764 void VmaJsonWriter::WriteNumber(uint32_t n)
3766 VMA_ASSERT(!m_InsideString);
3771 void VmaJsonWriter::WriteNumber(uint64_t n)
3773 VMA_ASSERT(!m_InsideString);
3778 void VmaJsonWriter::WriteBool(
bool b)
3780 VMA_ASSERT(!m_InsideString);
3782 m_SB.Add(b ?
"true" :
"false");
3785 void VmaJsonWriter::WriteNull()
3787 VMA_ASSERT(!m_InsideString);
// Emits the separator (':' after a key, ',' between values) and the
// indentation appropriate for the current nesting level; inside an
// object, even-indexed values must be string keys.
3792 void VmaJsonWriter::BeginValue(
bool isString)
3794 if(!m_Stack.empty())
3796 StackItem& currItem = m_Stack.back();
3797 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3798 currItem.valueCount % 2 == 0)
3800 VMA_ASSERT(isString);
3803 if(currItem.type == COLLECTION_TYPE_OBJECT &&
3804 currItem.valueCount % 2 != 0)
3808 else if(currItem.valueCount > 0)
3817 ++currItem.valueCount;
3821 void VmaJsonWriter::WriteIndent(
bool oneLess)
3823 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
3827 size_t count = m_Stack.size();
3828 if(count > 0 && oneLess)
3832 for(
size_t i = 0; i < count; ++i)
// Out-of-line VmaAllocation_T accessors. Each switches on m_Type; the
// `switch(m_Type)` lines and several case bodies/defaults are on dropped
// original lines.
3839 #endif // #if VMA_STATS_STRING_ENABLED 3843 VkDeviceSize VmaAllocation_T::GetOffset()
const 3847 case ALLOCATION_TYPE_BLOCK:
3848 return m_BlockAllocation.m_Offset;
3849 case ALLOCATION_TYPE_OWN:
3857 VkDeviceMemory VmaAllocation_T::GetMemory()
const 3861 case ALLOCATION_TYPE_BLOCK:
3862 return m_BlockAllocation.m_Block->m_hMemory;
3863 case ALLOCATION_TYPE_OWN:
3864 return m_OwnAllocation.m_hMemory;
3867 return VK_NULL_HANDLE;
3871 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 3875 case ALLOCATION_TYPE_BLOCK:
3876 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
3877 case ALLOCATION_TYPE_OWN:
3878 return m_OwnAllocation.m_MemoryTypeIndex;
3885 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 3889 case ALLOCATION_TYPE_BLOCK:
3890 return m_BlockAllocation.m_Block->m_BlockVectorType;
3891 case ALLOCATION_TYPE_OWN:
3892 return (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
3895 return VMA_BLOCK_VECTOR_TYPE_COUNT;
// For a block allocation the mapped pointer is the block's mapping plus
// this allocation's offset; for an own allocation it is stored directly.
3899 void* VmaAllocation_T::GetMappedData()
const 3903 case ALLOCATION_TYPE_BLOCK:
3904 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
3906 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
3913 case ALLOCATION_TYPE_OWN:
3914 return m_OwnAllocation.m_pMappedData;
3921 bool VmaAllocation_T::CanBecomeLost()
const 3925 case ALLOCATION_TYPE_BLOCK:
3926 return m_BlockAllocation.m_CanBecomeLost;
3927 case ALLOCATION_TYPE_OWN:
// GetPool is only meaningful for block allocations.
3935 VmaPool VmaAllocation_T::GetPool()
const 3937 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3938 return m_BlockAllocation.m_hPool;
// Tries to atomically mark the allocation lost. Fails if it is already
// lost or was used within the last frameInUseCount frames; retried via
// CAS because other threads may touch the frame index concurrently (the
// retry loop and return statements are on dropped original lines).
3941 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
3943 VMA_ASSERT(CanBecomeLost());
3949 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
3952 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
3957 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
3963 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Statistics serialization. VMA_SUBALLOCATION_TYPE_NAMES' entries are on
// dropped original lines; VmaPrintStatInfo writes one VmaStatInfo as a
// JSON object (the WriteNumber calls between the keys were dropped).
3973 #if VMA_STATS_STRING_ENABLED 3976 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
3985 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
3989 json.WriteString(
"Blocks");
3992 json.WriteString(
"Allocations");
3995 json.WriteString(
"UnusedRanges");
3998 json.WriteString(
"UsedBytes");
4001 json.WriteString(
"UnusedBytes");
// Nested single-line objects with Min/Avg/Max of the size distributions.
4004 json.WriteString(
"AllocationSize");
4005 json.BeginObject(
true);
4006 json.WriteString(
"Min");
4008 json.WriteString(
"Avg");
4010 json.WriteString(
"Max");
4014 json.WriteString(
"UnusedRangeSize");
4015 json.BeginObject(
true);
4016 json.WriteString(
"Min");
4018 json.WriteString(
"Avg");
4020 json.WriteString(
"Max");
// Comparator for the size-sorted free list: orders suballocation-list
// iterators by the size of the pointed-to range, with a heterogeneous
// overload for binary searching against a plain VkDeviceSize.
4027 #endif // #if VMA_STATS_STRING_ENABLED 4029 struct VmaSuballocationItemSizeLess
4032 const VmaSuballocationList::iterator lhs,
4033 const VmaSuballocationList::iterator rhs)
const 4035 return lhs->size < rhs->size;
4038 const VmaSuballocationList::iterator lhs,
4039 VkDeviceSize rhsSize)
const 4041 return lhs->size < rhsSize;
// Constructs an uninitialized block (no memory attached); Init() must be
// called before use. NOTE(review): initializers on dropped original
// lines 4049/4052-4053 (e.g. m_Size, m_FreeCount, m_SumFreeSize) are
// missing from this extraction.
4045 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
4046 m_MemoryTypeIndex(UINT32_MAX),
4047 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
4048 m_hMemory(VK_NULL_HANDLE),
4050 m_PersistentMap(false),
4051 m_pMappedData(VMA_NULL),
4054 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4055 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
// Takes ownership of newMemory and resets the block to a single free
// suballocation spanning the whole size. Trailing parameters
// (persistentMap/pMappedData) are on dropped original lines.
4059 void VmaDeviceMemoryBlock::Init(
4060 uint32_t newMemoryTypeIndex,
4061 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
4062 VkDeviceMemory newMemory,
4063 VkDeviceSize newSize,
// Must not already own memory.
4067 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4069 m_MemoryTypeIndex = newMemoryTypeIndex;
4070 m_BlockVectorType = newBlockVectorType;
4071 m_hMemory = newMemory;
4073 m_PersistentMap = persistentMap;
4074 m_pMappedData = pMappedData;
// Entire block starts free.
4076 m_SumFreeSize = newSize;
4078 m_Suballocations.clear();
4079 m_FreeSuballocationsBySize.clear();
4081 VmaSuballocation suballoc = {};
4082 suballoc.offset = 0;
4083 suballoc.size = newSize;
4084 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4085 suballoc.hAllocation = VK_NULL_HANDLE;
4087 m_Suballocations.push_back(suballoc);
// Register the single free range in the size-sorted index (the
// decrement of the end() iterator is on a dropped original line).
4088 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4090 m_FreeSuballocationsBySize.push_back(suballocItem);
// Releases the block's VkDeviceMemory: unmaps if mapped, fires the
// user's free callback, then frees the memory. The block must be empty.
4093 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
4097 VMA_ASSERT(IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
4099 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
4100 if(m_pMappedData != VMA_NULL)
4102 vkUnmapMemory(allocator->m_hDevice, m_hMemory);
4103 m_pMappedData = VMA_NULL;
// Notify user callback before the memory disappears.
4107 if(allocator->m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
4109 (*allocator->m_DeviceMemoryCallbacks.pfnFree)(allocator, m_MemoryTypeIndex, m_hMemory, m_Size);
4112 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
4113 m_hMemory = VK_NULL_HANDLE;
// Consistency check of the block's metadata: suballocations must be
// contiguous, free ranges non-adjacent, and the size-sorted free index
// complete and ordered. The early `return false` statements inside the
// failure branches are on dropped original lines.
4116 bool VmaDeviceMemoryBlock::Validate()
const 4118 if((m_hMemory == VK_NULL_HANDLE) ||
4120 m_Suballocations.empty())
// Expected values recomputed from scratch and compared at the end.
4126 VkDeviceSize calculatedOffset = 0;
4128 uint32_t calculatedFreeCount = 0;
4130 VkDeviceSize calculatedSumFreeSize = 0;
4133 size_t freeSuballocationsToRegister = 0;
// Two free ranges must never be adjacent (they should be merged).
4135 bool prevFree =
false;
4137 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4138 suballocItem != m_Suballocations.cend();
4141 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous ended.
4144 if(subAlloc.offset != calculatedOffset)
4149 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4151 if(prevFree && currFree)
4155 prevFree = currFree;
// Free ranges have no allocation handle; used ranges must have one.
4157 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4164 calculatedSumFreeSize += subAlloc.size;
4165 ++calculatedFreeCount;
4166 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4168 ++freeSuballocationsToRegister;
4172 calculatedOffset += subAlloc.size;
// The size-sorted index must contain exactly the registerable ranges,
// all free, in non-decreasing size order.
4177 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4182 VkDeviceSize lastSize = 0;
4183 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4185 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4188 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4193 if(suballocItem->size < lastSize)
4198 lastSize = suballocItem->size;
// Final result: all recomputed totals match the cached members.
4203 (calculatedOffset == m_Size) &&
4204 (calculatedSumFreeSize == m_SumFreeSize) &&
4205 (calculatedFreeCount == m_FreeCount);
// Searches this block for a place to put an allocation of the given
// size/alignment/type. First tries the size-sorted free list (best-fit
// vs worst-fit branch selected by a dropped VMA_BEST_FIT line); if
// canMakeOtherLost, additionally scans all suballocations for the
// cheapest request that sacrifices lost-able allocations. Several
// CheckAllocation argument lines and return statements are on dropped
// original lines.
4218 bool VmaDeviceMemoryBlock::CreateAllocationRequest(
4219 uint32_t currentFrameIndex,
4220 uint32_t frameInUseCount,
4221 VkDeviceSize bufferImageGranularity,
4222 VkDeviceSize allocSize,
4223 VkDeviceSize allocAlignment,
4224 VmaSuballocationType allocType,
4225 bool canMakeOtherLost,
4226 VmaAllocationRequest* pAllocationRequest)
4228 VMA_ASSERT(allocSize > 0);
4229 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4230 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4231 VMA_HEAVY_ASSERT(Validate());
// Quick reject: not enough total free space and can't make room.
4234 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4240 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4241 if(freeSuballocCount > 0)
// Best-fit: binary-search the smallest free range that can hold the
// request, then walk upward until one actually fits.
4246 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4247 m_FreeSuballocationsBySize.data(),
4248 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4250 VmaSuballocationItemSizeLess());
4251 size_t index = it - m_FreeSuballocationsBySize.data();
4252 for(; index < freeSuballocCount; ++index)
4257 bufferImageGranularity,
4261 m_FreeSuballocationsBySize[index],
4263 &pAllocationRequest->offset,
4264 &pAllocationRequest->itemsToMakeLostCount,
4265 &pAllocationRequest->sumFreeSize,
4266 &pAllocationRequest->sumItemSize))
4268 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit: iterate free ranges from largest to smallest.
4276 for(
size_t index = freeSuballocCount; index--; )
4281 bufferImageGranularity,
4285 m_FreeSuballocationsBySize[index],
4287 &pAllocationRequest->offset,
4288 &pAllocationRequest->itemsToMakeLostCount,
4289 &pAllocationRequest->sumFreeSize,
4290 &pAllocationRequest->sumItemSize))
4292 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Brute-force search over every candidate start, minimizing CalcCost.
4299 if(canMakeOtherLost)
4303 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
4304 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
4306 VmaAllocationRequest tmpAllocRequest = {};
4307 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
4308 suballocIt != m_Suballocations.end();
4311 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
4312 suballocIt->hAllocation->CanBecomeLost())
4317 bufferImageGranularity,
4323 &tmpAllocRequest.offset,
4324 &tmpAllocRequest.itemsToMakeLostCount,
4325 &tmpAllocRequest.sumFreeSize,
4326 &tmpAllocRequest.sumItemSize))
4328 tmpAllocRequest.item = suballocIt;
4330 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
4332 *pAllocationRequest = tmpAllocRequest;
// Found something only if a candidate overwrote the sentinel.
4338 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the lost-making part of a previously computed request: walks
// forward from the request's item, marking lost-able allocations lost
// until the required count is reached (a `return false` on CAS failure
// is on a dropped original line).
4347 bool VmaDeviceMemoryBlock::MakeRequestedAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount, VmaAllocationRequest* pAllocationRequest)
4349 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip already-free ranges.
4351 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
4353 ++pAllocationRequest->item;
4355 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4356 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
4357 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
4358 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Freeing may merge ranges; keep the (possibly new) iterator.
4360 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
4361 --pAllocationRequest->itemsToMakeLostCount;
4369 VMA_HEAVY_ASSERT(Validate());
4370 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
4371 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks every lost-able, sufficiently old allocation in this block as
// lost and frees its range; returns how many were lost.
4376 uint32_t VmaDeviceMemoryBlock::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4378 uint32_t lostAllocationCount = 0;
4379 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
4380 it != m_Suballocations.end();
4383 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
4384 it->hAllocation->CanBecomeLost() &&
4385 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors; continue from the
// returned iterator.
4387 it = FreeSuballocation(it);
4388 ++lostAllocationCount;
4391 return lostAllocationCount;
// Checks whether an allocation of (allocSize, allocAlignment, allocType) can
// be placed starting at *suballocItem. Two major paths: canMakeOtherLost==true
// (may consume following non-free suballocations by making them lost, counting
// them in *itemsToMakeLostCount / *pSumItemSize) and canMakeOtherLost==false
// (suballocItem must be a large-enough free suballocation). On success writes
// the aligned start to *pOffset. NOTE(review): heavily elided extract —
// braces, else branches and `return true/false` lines are missing; code left
// byte-identical, comments only.
4394 bool VmaDeviceMemoryBlock::CheckAllocation(
4395 uint32_t currentFrameIndex,
4396 uint32_t frameInUseCount,
4397 VkDeviceSize bufferImageGranularity,
4398 VkDeviceSize allocSize,
4399 VkDeviceSize allocAlignment,
4400 VmaSuballocationType allocType,
4401 VmaSuballocationList::const_iterator suballocItem,
4402 bool canMakeOtherLost,
4403 VkDeviceSize* pOffset,
4404 size_t* itemsToMakeLostCount,
4405 VkDeviceSize* pSumFreeSize,
4406 VkDeviceSize* pSumItemSize)
const 4408 VMA_ASSERT(allocSize > 0);
4409 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4410 VMA_ASSERT(suballocItem != m_Suballocations.cend());
4411 VMA_ASSERT(pOffset != VMA_NULL);
4412 // Reset the out-counters before either path accumulates into them.
4413 *itemsToMakeLostCount = 0;
// --- Path 1: the starting item may be occupied; other allocations may be lost.
4417 if(canMakeOtherLost)
4419 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4421 *pSumFreeSize = suballocItem->size;
// Occupied: usable only if the allocation can be made lost at this frame.
4425 if(suballocItem->hAllocation->CanBecomeLost() &&
4426 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4428 ++*itemsToMakeLostCount;
4429 *pSumItemSize = suballocItem->size;
// Early reject: not enough room between this offset and the end of the block.
4438 if(m_Size - suballocItem->offset < allocSize)
4444 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
4447 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4449 *pOffset += VMA_DEBUG_MARGIN;
4453 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4454 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: re-align if a previous suballocation of conflicting
// type shares the same "page" (see Vulkan bufferImageGranularity rules).
4458 if(bufferImageGranularity > 1)
4460 bool bufferImageGranularityConflict =
false;
4461 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4462 while(prevSuballocItem != m_Suballocations.cbegin())
4465 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4466 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4468 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4470 bufferImageGranularityConflict =
true;
4478 if(bufferImageGranularityConflict)
4480 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed us completely past the starting suballocation.
4486 if(*pOffset >= suballocItem->offset + suballocItem->size)
4492 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
4495 VmaSuballocationList::const_iterator next = suballocItem;
4497 const VkDeviceSize requiredEndMargin =
4498 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
4500 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
4502 if(suballocItem->offset + totalSize > m_Size)
// Consume following suballocations until totalSize is covered, summing free
// space and (lost-able) occupied space; any non-lost-able item aborts.
4509 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
4510 if(totalSize > suballocItem->size)
4512 VkDeviceSize remainingSize = totalSize - suballocItem->size;
4513 while(remainingSize > 0)
4516 if(lastSuballocItem == m_Suballocations.cend())
4520 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4522 *pSumFreeSize += lastSuballocItem->size;
4526 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
4527 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
4528 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4530 ++*itemsToMakeLostCount;
4531 *pSumItemSize += lastSuballocItem->size;
4538 remainingSize = (lastSuballocItem->size < remainingSize) ?
4539 remainingSize - lastSuballocItem->size : 0;
// Following suballocations on the same granularity page must also be
// lost-able if their type conflicts with the new allocation.
4545 if(bufferImageGranularity > 1)
4547 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
4549 while(nextSuballocItem != m_Suballocations.cend())
4551 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4552 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4554 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4556 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
4557 if(nextSuballoc.hAllocation->CanBecomeLost() &&
4558 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
4560 ++*itemsToMakeLostCount;
// --- Path 2: canMakeOtherLost == false; starting item must itself be free.
4579 const VmaSuballocation& suballoc = *suballocItem;
4580 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4582 *pSumFreeSize = suballoc.size;
4585 if(suballoc.size < allocSize)
4591 *pOffset = suballoc.offset;
4594 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
4596 *pOffset += VMA_DEBUG_MARGIN;
4600 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
4601 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same granularity back-scan as in path 1, but against a free suballocation.
4605 if(bufferImageGranularity > 1)
4607 bool bufferImageGranularityConflict =
false;
4608 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
4609 while(prevSuballocItem != m_Suballocations.cbegin())
4612 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
4613 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
4615 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
4617 bufferImageGranularityConflict =
true;
4625 if(bufferImageGranularityConflict)
4627 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
4632 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
4635 VmaSuballocationList::const_iterator next = suballocItem;
4637 const VkDeviceSize requiredEndMargin =
4638 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Reject if padding + allocation + end margin exceed this free suballocation.
4641 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity check: a conflicting neighbour on the same page fails
// the candidate placement (no make-lost allowed on this path).
4648 if(bufferImageGranularity > 1)
4650 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
4652 while(nextSuballocItem != m_Suballocations.cend())
4654 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
4655 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
4657 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
4676 bool VmaDeviceMemoryBlock::IsEmpty()
const 4678 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Commits a previously validated allocation request: turns the free
// suballocation at request.item into an occupied one at request.offset and
// inserts free "padding" suballocations for any leftover space before/after.
// NOTE(review): elided extract — the `if(paddingEnd)` / `if(paddingBegin)`
// guards and braces around the two padding sections are missing here.
4681 void VmaDeviceMemoryBlock::Alloc(
4682 const VmaAllocationRequest& request,
4683 VmaSuballocationType type,
4684 VkDeviceSize allocSize,
4685 VmaAllocation hAllocation)
4687 VMA_ASSERT(request.item != m_Suballocations.end());
4688 VmaSuballocation& suballoc = *request.item;
4690 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
4692 VMA_ASSERT(request.offset >= suballoc.offset);
4693 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
4694 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
4695 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the by-size free list before mutating size/type in place.
4699 UnregisterFreeSuballocation(request.item);
4701 suballoc.offset = request.offset;
4702 suballoc.size = allocSize;
4703 suballoc.type = type;
4704 suballoc.hAllocation = hAllocation;
// Trailing free padding becomes its own registered free suballocation.
4709 VmaSuballocation paddingSuballoc = {};
4710 paddingSuballoc.offset = request.offset + allocSize;
4711 paddingSuballoc.size = paddingEnd;
4712 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4713 VmaSuballocationList::iterator next = request.item;
4715 const VmaSuballocationList::iterator paddingEndItem =
4716 m_Suballocations.insert(next, paddingSuballoc);
4717 RegisterFreeSuballocation(paddingEndItem);
// Leading free padding likewise, inserted before the occupied item.
4723 VmaSuballocation paddingSuballoc = {};
4724 paddingSuballoc.offset = request.offset - paddingBegin;
4725 paddingSuballoc.size = paddingBegin;
4726 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4727 const VmaSuballocationList::iterator paddingBeginItem =
4728 m_Suballocations.insert(request.item, paddingSuballoc);
4729 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free range consumed, re-added per padding created (the
// paddingBegin/paddingEnd increments are in the elided lines 4735-4741).
4733 m_FreeCount = m_FreeCount - 1;
4734 if(paddingBegin > 0)
4742 m_SumFreeSize -= allocSize;
// Marks the given suballocation free, merges it with adjacent free neighbours,
// and returns an iterator to the resulting (possibly merged) free item.
// NOTE(review): elided extract — iterator increments/decrements and braces
// around the merge branches are missing.
4745 VmaSuballocationList::iterator VmaDeviceMemoryBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
4748 VmaSuballocation& suballoc = *suballocItem;
4749 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4750 suballoc.hAllocation = VK_NULL_HANDLE;
4754 m_SumFreeSize += suballoc.size;
// Check both neighbours for free ranges to coalesce with.
4757 bool mergeWithNext =
false;
4758 bool mergeWithPrev =
false;
4760 VmaSuballocationList::iterator nextItem = suballocItem;
4762 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
4764 mergeWithNext =
true;
4767 VmaSuballocationList::iterator prevItem = suballocItem;
4768 if(suballocItem != m_Suballocations.begin())
4771 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
4773 mergeWithPrev =
true;
// Neighbours must leave the by-size registry before their sizes change.
4779 UnregisterFreeSuballocation(nextItem);
4780 MergeFreeWithNext(suballocItem);
4785 UnregisterFreeSuballocation(prevItem);
4786 MergeFreeWithNext(prevItem);
4787 RegisterFreeSuballocation(prevItem);
// No merge with previous: register this item itself as the free range.
4792 RegisterFreeSuballocation(suballocItem);
4793 return suballocItem;
// Frees the suballocation owned by `allocation` via linear search over the
// suballocation list; asserts if the allocation is not found in this block.
4797 void VmaDeviceMemoryBlock::Free(
const VmaAllocation allocation)
4799 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
4800 suballocItem != m_Suballocations.end();
4803 VmaSuballocation& suballoc = *suballocItem;
4804 if(suballoc.hAllocation == allocation)
4806 FreeSuballocation(suballocItem);
4807 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the handle did not belong to this block — a caller bug.
4811 VMA_ASSERT(0 &&
"Not found!");
// Serializes this block's statistics and its full suballocation list as JSON
// (used by vmaBuildStatsString when detailedMap is requested).
4814 #if VMA_STATS_STRING_ENABLED 4816 void VmaDeviceMemoryBlock::PrintDetailedMap(
class VmaJsonWriter& json)
const 4820 json.WriteString(
"TotalBytes");
4821 json.WriteNumber(m_Size);
4823 json.WriteString(
"UnusedBytes");
4824 json.WriteNumber(m_SumFreeSize);
// Allocation count excludes free ranges; those are reported separately.
4826 json.WriteString(
"Allocations");
4827 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4829 json.WriteString(
"UnusedRanges");
4830 json.WriteNumber(m_FreeCount);
4832 json.WriteString(
"Suballocations");
// One compact JSON object per suballocation: type, size, offset.
4835 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4836 suballocItem != m_Suballocations.cend();
4837 ++suballocItem, ++i)
4839 json.BeginObject(
true);
4841 json.WriteString(
"Type");
4842 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4844 json.WriteString(
"Size");
4845 json.WriteNumber(suballocItem->size);
4847 json.WriteString(
"Offset");
4848 json.WriteNumber(suballocItem->offset);
// Merges the free suballocation at `item` with the free one immediately after
// it, accumulating sizes and erasing the successor. NOTE(review): the
// `++nextItem;` advance (original line ~4865) is elided in this extract.
4857 #endif // #if VMA_STATS_STRING_ENABLED 4859 void VmaDeviceMemoryBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
4861 VMA_ASSERT(item != m_Suballocations.end());
4862 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4864 VmaSuballocationList::iterator nextItem = item;
4866 VMA_ASSERT(nextItem != m_Suballocations.end());
4867 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
4869 item->size += nextItem->size;
4871 m_Suballocations.erase(nextItem);
// Adds a free suballocation to m_FreeSuballocationsBySize, keeping that vector
// sorted by size. Ranges below the registration threshold are deliberately
// not indexed (they are still in m_Suballocations).
4874 void VmaDeviceMemoryBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
4876 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4877 VMA_ASSERT(item->size > 0);
4881 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4883 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4885 if(m_FreeSuballocationsBySize.empty())
4887 m_FreeSuballocationsBySize.push_back(item);
// Non-empty vector: insert at the position that preserves size ordering.
4891 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the sorted-by-size index. Binary-searches
// to the first entry of equal size, then scans forward over the equal-size run
// to find the exact iterator; asserts if it is not present.
4899 void VmaDeviceMemoryBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
4901 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
4902 VMA_ASSERT(item->size > 0);
4906 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
4908 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4910 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4911 m_FreeSuballocationsBySize.data(),
4912 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
4914 VmaSuballocationItemSizeLess());
// Linear scan over entries of the same size until the exact item is matched.
4915 for(
size_t index = it - m_FreeSuballocationsBySize.data();
4916 index < m_FreeSuballocationsBySize.size();
4919 if(m_FreeSuballocationsBySize[index] == item)
4921 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Leaving the equal-size run without a match means the index is corrupt.
4924 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
4926 VMA_ASSERT(0 &&
"Not found.");
// Debug invariant check for m_FreeSuballocationsBySize: every entry must be
// free, at least the registration threshold in size, and the vector must be
// sorted ascending by size. The elided lines return false on violation.
4932 bool VmaDeviceMemoryBlock::ValidateFreeSuballocationList()
const 4934 VkDeviceSize lastSize = 0;
4935 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
4937 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
4939 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
4944 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4949 if(it->size < lastSize)
4955 lastSize = it->size;
4962 memset(&outInfo, 0,
sizeof(outInfo));
// Computes a VmaStatInfo for a single device memory block by iterating its
// suballocations and classifying free vs. used ranges. NOTE(review): the
// accumulation statements are elided from this extract.
4967 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaDeviceMemoryBlock& alloc)
4971 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
4983 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
4984 suballocItem != alloc.m_Suballocations.cend();
4987 const VmaSuballocation& suballoc = *suballocItem;
4988 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Finalizes an accumulated VmaStatInfo (body fully elided in this extract;
// presumably derives averages from sums/counts — TODO confirm in full source).
5015 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the VmaPoolCreateInfo fields to the embedded
// VmaBlockVector member (constructor-call lines are partially elided; the
// mapped/unmapped vector type is chosen from the pool's persistent-map flag).
5023 VmaPool_T::VmaPool_T(
5024 VmaAllocator hAllocator,
5028 createInfo.memoryTypeIndex,
5030 VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
5031 createInfo.blockSize,
5032 createInfo.minBlockCount,
5033 createInfo.maxBlockCount,
5035 createInfo.frameInUseCount,
// Pool destructor (body elided in this extract).
5040 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the allocator handle and all sizing /
// behavior parameters; starts with no blocks, no empty block, no
// defragmentator. The m_Blocks vector uses the allocator's callbacks.
5044 #if VMA_STATS_STRING_ENABLED 5061 #endif // #if VMA_STATS_STRING_ENABLED 5063 VmaBlockVector::VmaBlockVector(
5064 VmaAllocator hAllocator,
5065 uint32_t memoryTypeIndex,
5066 VMA_BLOCK_VECTOR_TYPE blockVectorType,
5067 VkDeviceSize preferredBlockSize,
5068 size_t minBlockCount,
5069 size_t maxBlockCount,
5070 VkDeviceSize bufferImageGranularity,
5071 uint32_t frameInUseCount,
5072 bool isCustomPool) :
5073 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5074 m_hAllocator(hAllocator),
5075 m_MemoryTypeIndex(memoryTypeIndex),
5076 m_BlockVectorType(blockVectorType),
5077 m_PreferredBlockSize(preferredBlockSize),
5078 m_MinBlockCount(minBlockCount),
5079 m_MaxBlockCount(maxBlockCount),
5080 m_BufferImageGranularity(bufferImageGranularity),
5081 m_FrameInUseCount(frameInUseCount),
5082 m_IsCustomPool(isCustomPool),
5083 m_HasEmptyBlock(false),
5084 m_pDefragmentator(VMA_NULL)
// Destructor: a defragmentator must have been destroyed first; then every
// block is destroyed (frees its VkDeviceMemory) and deleted, back to front.
5088 VmaBlockVector::~VmaBlockVector()
5090 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5092 for(
size_t i = m_Blocks.size(); i--; )
5094 m_Blocks[i]->Destroy(m_hAllocator);
5095 vma_delete(m_hAllocator, m_Blocks[i]);
// Eagerly creates m_MinBlockCount blocks of the preferred size, propagating
// the first failure (the failure/success return lines are elided here).
5099 VkResult VmaBlockVector::CreateMinBlocks()
5101 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5103 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5104 if(res != VK_SUCCESS)
// Accumulates pool-level statistics over all blocks under the vector mutex
// (per-field accumulation lines besides `size` are elided in this extract).
5112 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5119 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5121 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5123 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5125 VMA_HEAVY_ASSERT(pBlock->Validate());
5127 const uint32_t rangeCount = (uint32_t)pBlock->m_Suballocations.size();
5129 pStats->
size += pBlock->m_Size;
// Upper bound on make-lost retry rounds performed by VmaBlockVector::Allocate
// before giving up with VK_ERROR_TOO_MANY_OBJECTS.
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three stages: (1) try every existing
// block without making anything lost; (2) create a new block (halving the
// size on failure, per the elided retry lines) and allocate from it; (3) if
// allowed, repeatedly pick the cheapest make-lost candidate across blocks and
// retry up to VMA_ALLOCATION_TRY_COUNT times. NOTE(review): heavily elided —
// braces, several arguments and return statements are missing; code left
// byte-identical, comments only.
5138 VkResult VmaBlockVector::Allocate(
5139 VmaPool hCurrentPool,
5140 uint32_t currentFrameIndex,
5141 const VkMemoryRequirements& vkMemReq,
5143 VmaSuballocationType suballocType,
5144 VmaAllocation* pAllocation)
// Persistent-map request must match this vector's mapped/unmapped type.
5148 (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED))
5150 VMA_ASSERT(0 &&
"Usage of VMA_ALLOCATION_CREATE_PERSISTENT_MAP_BIT must match VMA_POOL_CREATE_PERSISTENT_MAP_BIT.");
5151 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
5154 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Stage 1: search existing blocks for space without losing allocations.
5158 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5160 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5161 VMA_ASSERT(pCurrBlock);
5162 VmaAllocationRequest currRequest = {};
5163 if(pCurrBlock->CreateAllocationRequest(
5166 m_BufferImageGranularity,
5174 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// The chosen block is no longer empty.
5177 if(pCurrBlock->IsEmpty())
5179 m_HasEmptyBlock =
false;
5182 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5183 pCurrBlock->Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5184 (*pAllocation)->InitBlockAllocation(
5193 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5194 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Stage 2: create a brand-new block if the cap allows it.
5199 const bool canCreateNewBlock =
5201 (m_Blocks.size() < m_MaxBlockCount);
5204 if(canCreateNewBlock)
5207 VkDeviceSize blockSize = m_PreferredBlockSize;
5208 size_t newBlockIndex = 0;
5209 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Non-custom pools retry with progressively smaller blocks (halving lines
// are elided) as long as the request still fits.
5212 if(res < 0 && m_IsCustomPool ==
false)
5216 if(blockSize >= vkMemReq.size)
5218 res = CreateBlock(blockSize, &newBlockIndex);
5223 if(blockSize >= vkMemReq.size)
5225 res = CreateBlock(blockSize, &newBlockIndex);
5230 if(res == VK_SUCCESS)
5232 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5233 VMA_ASSERT(pBlock->m_Size >= vkMemReq.size);
// A fresh block has one big free suballocation; allocate at offset 0.
5236 VmaAllocationRequest allocRequest = {};
5237 allocRequest.item = pBlock->m_Suballocations.begin();
5238 allocRequest.offset = 0;
5239 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5240 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
5241 (*pAllocation)->InitBlockAllocation(
5244 allocRequest.offset,
5250 VMA_HEAVY_ASSERT(pBlock->Validate());
5251 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
// Stage 3: make existing allocations lost, cheapest candidate first.
5260 if(canMakeOtherLost)
5262 uint32_t tryIndex = 0;
5263 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
5265 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
5266 VmaAllocationRequest bestRequest = {};
5267 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
5271 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5273 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5274 VMA_ASSERT(pCurrBlock);
5275 VmaAllocationRequest currRequest = {};
5276 if(pCurrBlock->CreateAllocationRequest(
5279 m_BufferImageGranularity,
// Cost = bytes of live allocations that would be sacrificed.
5286 const VkDeviceSize currRequestCost = currRequest.CalcCost();
5287 if(pBestRequestBlock == VMA_NULL ||
5288 currRequestCost < bestRequestCost)
5290 pBestRequestBlock = pCurrBlock;
5291 bestRequest = currRequest;
5292 bestRequestCost = currRequestCost;
// Zero cost cannot be beaten — stop searching.
5294 if(bestRequestCost == 0)
5302 if(pBestRequestBlock != VMA_NULL)
5304 if(pBestRequestBlock->MakeRequestedAllocationsLost(
5310 if(pBestRequestBlock->IsEmpty())
5312 m_HasEmptyBlock =
false;
5315 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex);
5316 pBestRequestBlock->Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
5317 (*pAllocation)->InitBlockAllocation(
5326 VMA_HEAVY_ASSERT(pBlock->Validate());
5327 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
// Exhausting all retries means other threads kept stealing the space.
5341 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
5343 return VK_ERROR_TOO_MANY_OBJECTS;
5347 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block; if that leaves the block empty and an
// empty block already exists (and we are above m_MinBlockCount), the block is
// scheduled for deletion. The VkDeviceMemory free happens outside the mutex.
5350 void VmaBlockVector::Free(
5351 VmaAllocation hAllocation)
5353 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: list mutation only; actual destruction is deferred.
5357 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5359 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
5361 pBlock->Free(hAllocation);
5362 VMA_HEAVY_ASSERT(pBlock->Validate());
5364 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Keep at most one empty block around as a cache.
5367 if(pBlock->IsEmpty())
5370 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
5372 pBlockToDelete = pBlock;
5378 m_HasEmptyBlock =
true;
5382 IncrementallySortBlocks();
// Deallocation of VkDeviceMemory is slow — deliberately done unlocked.
5387 if(pBlockToDelete != VMA_NULL)
5389 VMA_DEBUG_LOG(
" Deleted empty allocation");
5390 pBlockToDelete->Destroy(m_hAllocator);
5391 vma_delete(m_hAllocator, pBlockToDelete);
// Removes (but does not destroy) the given block from m_Blocks; the elided
// lines after the match handle the early return / not-found assertion.
5395 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
5397 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5399 if(m_Blocks[blockIndex] == pBlock)
5401 VmaVectorRemove(m_Blocks, blockIndex);
5408 void VmaBlockVector::IncrementallySortBlocks()
5411 for(
size_t i = 1; i < m_Blocks.size(); ++i)
5413 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
5415 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize, persistently maps it when this
// vector is the MAPPED type (and mapping is not globally suspended), notifies
// the user's allocate callback, wraps it in a VmaDeviceMemoryBlock and
// appends it to m_Blocks, optionally returning its index.
5421 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
5423 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
5424 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
5425 allocInfo.allocationSize = blockSize;
5426 const VkDevice hDevice = m_hAllocator->m_hDevice;
5427 VkDeviceMemory mem = VK_NULL_HANDLE;
5428 VkResult res = vkAllocateMemory(hDevice, &allocInfo, m_hAllocator->GetAllocationCallbacks(), &mem);
// Persistent mapping is skipped while unmap-all is in effect (counter != 0).
5437 void* pMappedData = VMA_NULL;
5438 const bool persistentMap = (m_BlockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED);
5439 if(persistentMap && m_hAllocator->m_UnmapPersistentlyMappedMemoryCounter == 0)
5441 res = vkMapMemory(hDevice, mem, 0, VK_WHOLE_SIZE, 0, &pMappedData);
// Mapping failure releases the just-allocated memory before bailing out.
5444 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
5445 vkFreeMemory(hDevice, mem, m_hAllocator->GetAllocationCallbacks());
5451 if(m_hAllocator->m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
5453 (*m_hAllocator->m_DeviceMemoryCallbacks.pfnAllocate)(m_hAllocator, m_MemoryTypeIndex, mem, allocInfo.allocationSize);
5457 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
5460 (VMA_BLOCK_VECTOR_TYPE)m_BlockVectorType,
5462 allocInfo.allocationSize,
5466 m_Blocks.push_back(pBlock);
5467 if(pNewBlockIndex != VMA_NULL)
5469 *pNewBlockIndex = m_Blocks.size() - 1;
// JSON-dumps every block of this vector under the mutex by delegating to each
// block's own PrintDetailedMap.
5475 #if VMA_STATS_STRING_ENABLED 5477 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
5479 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5481 for(
size_t i = 0; i < m_Blocks.size(); ++i)
5483 m_Blocks[i]->PrintDetailedMap(json);
// Unmaps every currently-mapped block (used while the global unmap counter is
// raised, e.g. around lost-device or defragmentation scenarios).
5488 #endif // #if VMA_STATS_STRING_ENABLED 5490 void VmaBlockVector::UnmapPersistentlyMappedMemory()
5492 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5494 for(
size_t i = m_Blocks.size(); i--; )
5496 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5497 if(pBlock->m_pMappedData != VMA_NULL)
// Only persistently-mapped blocks can legitimately be mapped here.
5499 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
5500 vkUnmapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory);
5501 pBlock->m_pMappedData = VMA_NULL;
// Re-maps every persistently-mapped block. All blocks are attempted even if
// one fails; the last failing VkResult is returned (return line elided).
5506 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
5508 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5510 VkResult finalResult = VK_SUCCESS;
5511 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
5513 VmaDeviceMemoryBlock* pBlock = m_Blocks[i];
5514 if(pBlock->m_PersistentMap)
// Must not already be mapped when re-mapping.
5516 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
5517 VkResult localResult = vkMapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &pBlock->m_pMappedData);
5518 if(localResult != VK_SUCCESS)
5520 finalResult = localResult;
// Lazily creates the per-vector defragmentator on first use and returns it.
5527 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
5529 const VkAllocationCallbacks* pAllocationCallbacks,
5530 uint32_t currentFrameIndex)
5532 if(m_pDefragmentator == VMA_NULL)
5534 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
5536 pAllocationCallbacks,
5541 return m_pDefragmentator;
// Runs the defragmentator under the mutex, accumulates moved-bytes/allocation
// stats into *pDefragmentationStats, then destroys and removes blocks left
// empty (keeping m_MinBlockCount and at most one cached empty block).
5544 VkResult VmaBlockVector::Defragment(
5546 VkDeviceSize& maxBytesToMove,
5547 uint32_t& maxAllocationsToMove)
5549 if(m_pDefragmentator == VMA_NULL)
5554 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5557 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
5560 if(pDefragmentationStats != VMA_NULL)
5562 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
5563 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must honor the caller-supplied budgets.
5566 VMA_ASSERT(bytesMoved <= maxBytesToMove);
5567 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap blocks emptied by the moves, iterating backwards for safe removal.
5573 m_HasEmptyBlock =
false;
5574 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
5576 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
5577 if(pBlock->IsEmpty())
5579 if(m_Blocks.size() > m_MinBlockCount)
5581 if(pDefragmentationStats != VMA_NULL)
5584 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
5587 VmaVectorRemove(m_Blocks, blockIndex);
5588 pBlock->Destroy(m_hAllocator);
5589 vma_delete(m_hAllocator, pBlock);
// Cannot delete (at minimum count): remember we still own an empty block.
5593 m_HasEmptyBlock =
true;
5601 void VmaBlockVector::DestroyDefragmentator()
5603 if(m_pDefragmentator != VMA_NULL)
5605 vma_delete(m_hAllocator, m_pDefragmentator);
5606 m_pDefragmentator = VMA_NULL;
// Forces every allocation in every block of this pool to become lost.
// NOTE(review): in the visible code pLostAllocationCount is never written and
// MakeAllocationsLost's return value is discarded — the accumulation may live
// in the elided lines, or the out-count may genuinely go unreported; confirm
// against the full source before relying on it.
5610 void VmaBlockVector::MakePoolAllocationsLost(
5611 uint32_t currentFrameIndex,
5612 size_t* pLostAllocationCount)
5614 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5616 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5618 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5620 pBlock->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Folds every block's statistics into the total, the per-memory-type and the
// per-heap buckets of *pStats.
5624 void VmaBlockVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
5626 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5628 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5630 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5632 VMA_HEAVY_ASSERT(pBlock->Validate());
5634 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
5635 VmaAddStatInfo(pStats->
total, allocationStatInfo);
5636 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
5637 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Defragmentator constructor: records target block vector and frame index;
// counters start at zero, candidate lists start empty (allocated through the
// provided callbacks).
5644 VmaDefragmentator::VmaDefragmentator(
5646 const VkAllocationCallbacks* pAllocationCallbacks,
5647 VmaBlockVector* pBlockVector,
5648 uint32_t currentFrameIndex) :
5650 m_pAllocationCallbacks(pAllocationCallbacks),
5651 m_pBlockVector(pBlockVector),
5652 m_CurrentFrameIndex(currentFrameIndex),
5654 m_AllocationsMoved(0),
5655 m_Allocations(VmaStlAllocator<AllocationInfo>(pAllocationCallbacks)),
5656 m_Blocks(VmaStlAllocator<BlockInfo*>(pAllocationCallbacks))
// Destructor: frees the per-block BlockInfo records (back to front).
5660 VmaDefragmentator::~VmaDefragmentator()
5662 for(
size_t i = m_Blocks.size(); i--; )
5664 vma_delete(m_pAllocationCallbacks, m_Blocks[i]);
5668 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
5670 AllocationInfo allocInfo;
5671 allocInfo.m_hAllocation = hAlloc;
5672 allocInfo.m_pChanged = pChanged;
5673 m_Allocations.push_back(allocInfo);
// One round of compaction: repeatedly takes the last candidate allocation of
// the last block and tries to re-place it in an earlier block (or earlier
// offset), memcpy-ing the data between persistently mapped pointers, within
// the maxBytesToMove / maxAllocationsToMove budgets. Returns VK_INCOMPLETE
// when a budget would be exceeded. NOTE(review): elided extract — braces,
// index-decrement lines and several returns are missing.
5676 VkResult VmaDefragmentator::DefragmentRound(
5677 VkDeviceSize maxBytesToMove,
5678 uint32_t maxAllocationsToMove)
5680 if(m_Blocks.empty())
// Cursor starts at the last allocation of the last (most movable) block.
5685 size_t srcBlockIndex = m_Blocks.size() - 1;
5686 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the next block that still has candidates.
5692 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
5694 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
5697 if(srcBlockIndex == 0)
5704 srcAllocIndex = SIZE_MAX;
5709 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
5713 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
5714 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
5716 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
5717 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
5718 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
5719 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
5722 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
5724 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
5725 VmaAllocationRequest dstAllocRequest;
5726 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
5727 m_CurrentFrameIndex,
5728 m_pBlockVector->GetFrameInUseCount(),
5729 m_pBlockVector->GetBufferImageGranularity(),
5734 &dstAllocRequest) &&
5736 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
5738 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
5741 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
5742 (m_BytesMoved + size > maxBytesToMove))
5744 return VK_INCOMPLETE;
// Both blocks must be mapped to copy through host pointers.
5747 void* pDstMappedData = VMA_NULL;
5748 VkResult res = pDstBlockInfo->EnsureMapping(m_hDevice, &pDstMappedData);
5749 if(res != VK_SUCCESS)
5754 void* pSrcMappedData = VMA_NULL;
5755 res = pSrcBlockInfo->EnsureMapping(m_hDevice, &pSrcMappedData);
5756 if(res != VK_SUCCESS)
// memcpy between mapped ranges, then rebind the allocation's metadata.
5763 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
5764 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
5767 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
5768 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
5770 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
5772 if(allocInfo.m_pChanged != VMA_NULL)
5774 *allocInfo.m_pChanged = VK_TRUE;
5777 ++m_AllocationsMoved;
5778 m_BytesMoved += size;
5780 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found: step the cursor backwards (decrements elided).
5788 if(srcAllocIndex > 0)
5794 if(srcBlockIndex > 0)
5797 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation: builds per-block BlockInfo records, distributes
// registered (non-lost) candidate allocations to their owning block, sorts
// blocks and allocations into move order, then runs up to two rounds of
// DefragmentRound before unmapping all temporarily mapped blocks.
5807 VkResult VmaDefragmentator::Defragment(
5808 VkDeviceSize maxBytesToMove,
5809 uint32_t maxAllocationsToMove)
5811 if(m_Allocations.empty())
// One BlockInfo per block in the vector.
5817 const size_t blockCount = m_pBlockVector->m_Blocks.size();
5818 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5820 BlockInfo* pBlockInfo = vma_new(m_pAllocationCallbacks, BlockInfo)(m_pAllocationCallbacks);
5821 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
5822 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so candidates can be binary-searched to their block.
5826 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
5829 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
5831 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Already-lost allocations have nothing to move.
5833 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
5835 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
5836 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
5837 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
5839 (*it)->m_Allocations.push_back(allocInfo);
5847 m_Allocations.clear();
5849 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5851 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
5852 pBlockInfo->CalcHasNonMovableAllocations();
5853 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred destinations come first after this sort.
5857 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
5860 VkResult result = VK_SUCCESS;
5861 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
5863 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings EnsureMapping created during the rounds.
5867 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
5869 m_Blocks[blockIndex]->Unmap(m_hDevice);
5875 bool VmaDefragmentator::MoveMakesSense(
5876 size_t dstBlockIndex, VkDeviceSize dstOffset,
5877 size_t srcBlockIndex, VkDeviceSize srcOffset)
5879 if(dstBlockIndex < srcBlockIndex)
5883 if(dstBlockIndex > srcBlockIndex)
5887 if(dstOffset < srcOffset)
// VmaAllocator_T constructor initializer list and body (the signature line
// taking const VmaAllocatorCreateInfo* is elided above this extract): copies
// handles and callbacks, zeroes all cached structs, queries device/memory
// properties, then builds one VmaBlockVector and one own-allocations vector
// per (memory type, mapped/unmapped) pair.
5899 m_PhysicalDevice(pCreateInfo->physicalDevice),
5900 m_hDevice(pCreateInfo->device),
5901 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
5902 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
5903 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
5904 m_UnmapPersistentlyMappedMemoryCounter(0),
5905 m_PreferredLargeHeapBlockSize(0),
5906 m_PreferredSmallHeapBlockSize(0),
5907 m_CurrentFrameIndex(0),
5908 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero everything before selective initialization below.
5912 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
5913 memset(&m_MemProps, 0,
sizeof(m_MemProps));
5914 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
5916 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
5917 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
5925 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
5926 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
// One block vector per memory type and per MAPPED/UNMAPPED variant.
5933 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
5935 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
5937 for(
size_t blockVectorTypeIndex = 0; blockVectorTypeIndex < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorTypeIndex)
5939 m_pBlockVectors[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, VmaBlockVector)(
5942 static_cast<VMA_BLOCK_VECTOR_TYPE
>(blockVectorTypeIndex),
5946 GetBufferImageGranularity(),
5951 m_pOwnAllocations[memTypeIndex][blockVectorTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Allocator destructor: all custom pools must already be destroyed; frees the
// per-(memory type, vector type) own-allocation vectors and block vectors.
5956 VmaAllocator_T::~VmaAllocator_T()
5958 VMA_ASSERT(m_Pools.empty());
5960 for(
size_t i = GetMemoryTypeCount(); i--; )
5962 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
5964 vma_delete(
this, m_pOwnAllocations[i][j]);
5965 vma_delete(
this, m_pBlockVectors[i][j]);
5970 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
5972 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
5973 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
5974 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates for a specific memory type: either as a dedicated ("own")
// VkDeviceMemory (forced by flags/debug or for requests larger than half the
// preferred block size) or from the matching block vector, falling back to an
// own allocation if the block vector fails. NOTE(review): elided extract —
// several condition and argument lines are missing.
5977 VkResult VmaAllocator_T::AllocateMemoryOfType(
5978 const VkMemoryRequirements& vkMemReq,
5980 uint32_t memTypeIndex,
5981 VmaSuballocationType suballocType,
5982 VmaAllocation* pAllocation)
5984 VMA_ASSERT(pAllocation != VMA_NULL);
5985 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Pick the MAPPED or UNMAPPED vector based on the creation flags.
5987 uint32_t blockVectorType = VmaAllocationCreateFlagsToBlockVectorType(createInfo.
flags);
5988 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
5989 VMA_ASSERT(blockVector);
5991 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests over half a block go straight to dedicated memory.
5993 const bool ownMemory =
5995 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
5997 vkMemReq.size > preferredBlockSize / 2);
6003 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6007 return AllocateOwnMemory(
6018 VkResult res = blockVector->Allocate(
6020 m_CurrentFrameIndex.load(),
6025 if(res == VK_SUCCESS)
// Block-vector failure: last resort is a dedicated allocation.
6031 res = AllocateOwnMemory(
6036 createInfo.pUserData,
6038 if(res == VK_SUCCESS)
6041 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
6047 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6053 VkResult VmaAllocator_T::AllocateOwnMemory(
6055 VmaSuballocationType suballocType,
6056 uint32_t memTypeIndex,
6059 VmaAllocation* pAllocation)
6061 VMA_ASSERT(pAllocation);
6063 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6064 allocInfo.memoryTypeIndex = memTypeIndex;
6065 allocInfo.allocationSize = size;
6068 VkDeviceMemory hMemory = VK_NULL_HANDLE;
6069 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &hMemory);
6072 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
6076 void* pMappedData =
nullptr;
6079 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
6081 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
6084 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
6085 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
6092 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
6094 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, memTypeIndex, hMemory, size);
6097 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load());
6098 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
6102 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6103 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
6104 VMA_ASSERT(pOwnAllocations);
6105 VmaVectorInsertSorted<VmaPointerLess>(*pOwnAllocations, *pAllocation);
6108 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
6113 VkResult VmaAllocator_T::AllocateMemory(
6114 const VkMemoryRequirements& vkMemReq,
6116 VmaSuballocationType suballocType,
6117 VmaAllocation* pAllocation)
6122 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
6123 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6125 if((createInfo.
pool != VK_NULL_HANDLE) &&
6128 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_OWN_MEMORY_BIT when pool != null is invalid.");
6129 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6132 if(createInfo.
pool != VK_NULL_HANDLE)
6134 return createInfo.
pool->m_BlockVector.Allocate(
6136 m_CurrentFrameIndex.load(),
6145 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
6146 uint32_t memTypeIndex = UINT32_MAX;
6148 if(res == VK_SUCCESS)
6150 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6152 if(res == VK_SUCCESS)
6162 memoryTypeBits &= ~(1u << memTypeIndex);
6165 if(res == VK_SUCCESS)
6167 res = AllocateMemoryOfType(vkMemReq, createInfo, memTypeIndex, suballocType, pAllocation);
6169 if(res == VK_SUCCESS)
6179 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6190 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
6192 VMA_ASSERT(allocation);
6194 if(allocation->CanBecomeLost() ==
false ||
6195 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6197 switch(allocation->GetType())
6199 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
6201 VmaBlockVector* pBlockVector = VMA_NULL;
6202 VmaPool hPool = allocation->GetPool();
6203 if(hPool != VK_NULL_HANDLE)
6205 pBlockVector = &hPool->m_BlockVector;
6209 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6210 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
6211 pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6213 pBlockVector->Free(allocation);
6216 case VmaAllocation_T::ALLOCATION_TYPE_OWN:
6217 FreeOwnMemory(allocation);
6224 vma_delete(
this, allocation);
6227 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
6229 InitStatInfo(pStats->
total);
6230 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
6232 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6235 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6237 const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6238 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6240 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
6241 VMA_ASSERT(pBlockVector);
6242 pBlockVector->AddStats(pStats, memTypeIndex, heapIndex);
6246 VmaPostprocessCalcStatInfo(pStats->
total);
6247 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
6248 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
6249 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
6250 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
6253 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
6255 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
6257 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
6259 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6261 for(
size_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
6263 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6264 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6265 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6269 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6270 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6271 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
6273 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
6274 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(m_hDevice);
6280 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6281 pBlockVector->UnmapPersistentlyMappedMemory();
6288 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6289 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6291 m_Pools[poolIndex]->GetBlockVector().UnmapPersistentlyMappedMemory();
6298 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
6300 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
6301 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
6303 VkResult finalResult = VK_SUCCESS;
6304 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
6308 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6309 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
6311 m_Pools[poolIndex]->GetBlockVector().MapPersistentlyMappedMemory();
6315 for(
size_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
6317 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
6318 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
6319 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6323 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6324 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6325 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
6327 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
6328 hAlloc->OwnAllocMapPersistentlyMappedMemory(m_hDevice);
6334 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
6335 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
6336 if(localResult != VK_SUCCESS)
6338 finalResult = localResult;
6350 VkResult VmaAllocator_T::Defragment(
6351 VmaAllocation* pAllocations,
6352 size_t allocationCount,
6353 VkBool32* pAllocationsChanged,
6357 if(pAllocationsChanged != VMA_NULL)
6359 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
6361 if(pDefragmentationStats != VMA_NULL)
6363 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
6366 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
6368 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
6369 return VK_ERROR_MEMORY_MAP_FAILED;
6372 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
6374 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
6376 const size_t poolCount = m_Pools.size();
6379 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
6381 VmaAllocation hAlloc = pAllocations[allocIndex];
6383 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
6385 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
6387 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
6389 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
6391 VmaBlockVector* pAllocBlockVector =
nullptr;
6393 const VmaPool hAllocPool = hAlloc->GetPool();
6395 if(hAllocPool != VK_NULL_HANDLE)
6397 pAllocBlockVector = &hAllocPool->GetBlockVector();
6402 pAllocBlockVector = m_pBlockVectors[memTypeIndex][hAlloc->GetBlockVectorType()];
6405 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
6407 GetAllocationCallbacks(),
6410 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
6411 &pAllocationsChanged[allocIndex] : VMA_NULL;
6412 pDefragmentator->AddAllocation(hAlloc, pChanged);
6416 VkResult result = VK_SUCCESS;
6420 VkDeviceSize maxBytesToMove = SIZE_MAX;
6421 uint32_t maxAllocationsToMove = UINT32_MAX;
6422 if(pDefragmentationInfo != VMA_NULL)
6429 for(uint32_t memTypeIndex = 0;
6430 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
6434 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6436 for(uint32_t blockVectorType = 0;
6437 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
6440 result = m_pBlockVectors[memTypeIndex][blockVectorType]->Defragment(
6441 pDefragmentationStats,
6443 maxAllocationsToMove);
6449 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
6451 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
6452 pDefragmentationStats,
6454 maxAllocationsToMove);
6460 for(
size_t poolIndex = poolCount; poolIndex--; )
6462 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
6466 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
6468 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6470 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
6472 m_pBlockVectors[memTypeIndex][blockVectorType]->DestroyDefragmentator();
6480 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
6482 if(hAllocation->CanBecomeLost())
6488 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
6489 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
6492 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6496 pAllocationInfo->
offset = 0;
6497 pAllocationInfo->
size = hAllocation->GetSize();
6499 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6502 else if(localLastUseFrameIndex == localCurrFrameIndex)
6504 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6505 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6506 pAllocationInfo->
offset = hAllocation->GetOffset();
6507 pAllocationInfo->
size = hAllocation->GetSize();
6508 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6509 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6514 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
6516 localLastUseFrameIndex = localCurrFrameIndex;
6524 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
6525 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
6526 pAllocationInfo->
offset = hAllocation->GetOffset();
6527 pAllocationInfo->
size = hAllocation->GetSize();
6528 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
6529 pAllocationInfo->
pUserData = hAllocation->GetUserData();
6533 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
6535 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
6548 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
6550 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
6551 if(res != VK_SUCCESS)
6553 vma_delete(
this, *pPool);
6560 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6561 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
6567 void VmaAllocator_T::DestroyPool(VmaPool pool)
6571 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6572 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
6573 VMA_ASSERT(success &&
"Pool not found in Allocator.");
6576 vma_delete(
this, pool);
6579 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
6581 pool->m_BlockVector.GetPoolStats(pPoolStats);
6584 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
6586 m_CurrentFrameIndex.store(frameIndex);
6589 void VmaAllocator_T::MakePoolAllocationsLost(
6591 size_t* pLostAllocationCount)
6593 hPool->m_BlockVector.MakePoolAllocationsLost(
6594 m_CurrentFrameIndex.load(),
6595 pLostAllocationCount);
6598 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
6600 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST);
6601 (*pAllocation)->InitLost();
6604 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
6606 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
6608 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
6610 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6611 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
6612 VMA_ASSERT(pOwnAllocations);
6613 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pOwnAllocations, allocation);
6614 VMA_ASSERT(success);
6617 VkDeviceMemory hMemory = allocation->GetMemory();
6620 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
6622 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memTypeIndex, hMemory, allocation->GetSize());
6625 if(allocation->GetMappedData() != VMA_NULL)
6627 vkUnmapMemory(m_hDevice, hMemory);
6630 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
6632 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
6635 #if VMA_STATS_STRING_ENABLED 6637 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
6639 bool ownAllocationsStarted =
false;
6640 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6642 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
6643 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6645 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
6646 VMA_ASSERT(pOwnAllocVector);
6647 if(pOwnAllocVector->empty() ==
false)
6649 if(ownAllocationsStarted ==
false)
6651 ownAllocationsStarted =
true;
6652 json.WriteString(
"OwnAllocations");
6656 json.BeginString(
"Type ");
6657 json.ContinueString(memTypeIndex);
6658 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6660 json.ContinueString(
" Mapped");
6666 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
6668 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
6669 json.BeginObject(
true);
6671 json.WriteString(
"Size");
6672 json.WriteNumber(hAlloc->GetSize());
6674 json.WriteString(
"Type");
6675 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
6684 if(ownAllocationsStarted)
6690 bool allocationsStarted =
false;
6691 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6693 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
6695 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
6697 if(allocationsStarted ==
false)
6699 allocationsStarted =
true;
6700 json.WriteString(
"Allocations");
6704 json.BeginString(
"Type ");
6705 json.ContinueString(memTypeIndex);
6706 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
6708 json.ContinueString(
" Mapped");
6712 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(json);
6716 if(allocationsStarted)
6723 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
6724 const size_t poolCount = m_Pools.size();
6727 json.WriteString(
"Pools");
6729 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
6731 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
6738 #endif // #if VMA_STATS_STRING_ENABLED 6740 static VkResult AllocateMemoryForImage(
6741 VmaAllocator allocator,
6744 VmaSuballocationType suballocType,
6745 VmaAllocation* pAllocation)
6747 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
6749 VkMemoryRequirements vkMemReq = {};
6750 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
6752 return allocator->AllocateMemory(
6754 *pAllocationCreateInfo,
6764 VmaAllocator* pAllocator)
6766 VMA_ASSERT(pCreateInfo && pAllocator);
6767 VMA_DEBUG_LOG(
"vmaCreateAllocator");
6773 VmaAllocator allocator)
6775 if(allocator != VK_NULL_HANDLE)
6777 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
6778 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
6779 vma_delete(&allocationCallbacks, allocator);
6784 VmaAllocator allocator,
6785 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
6787 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
6788 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
6792 VmaAllocator allocator,
6793 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
6795 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
6796 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
6800 VmaAllocator allocator,
6801 uint32_t memoryTypeIndex,
6802 VkMemoryPropertyFlags* pFlags)
6804 VMA_ASSERT(allocator && pFlags);
6805 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
6806 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
6810 VmaAllocator allocator,
6811 uint32_t frameIndex)
6813 VMA_ASSERT(allocator);
6814 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
6816 VMA_DEBUG_GLOBAL_MUTEX_LOCK
6818 allocator->SetCurrentFrameIndex(frameIndex);
6822 VmaAllocator allocator,
6825 VMA_ASSERT(allocator && pStats);
6826 VMA_DEBUG_GLOBAL_MUTEX_LOCK
6827 allocator->CalculateStats(pStats);
6830 #if VMA_STATS_STRING_ENABLED 6833 VmaAllocator allocator,
6834 char** ppStatsString,
6835 VkBool32 detailedMap)
6837 VMA_ASSERT(allocator && ppStatsString);
6838 VMA_DEBUG_GLOBAL_MUTEX_LOCK
6840 VmaStringBuilder sb(allocator);
6842 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
6846 allocator->CalculateStats(&stats);
6848 json.WriteString(
"Total");
6849 VmaPrintStatInfo(json, stats.
total);
6851 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
6853 json.BeginString(
"Heap ");
6854 json.ContinueString(heapIndex);
6858 json.WriteString(
"Size");
6859 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
6861 json.WriteString(
"Flags");
6862 json.BeginArray(
true);
6863 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
6865 json.WriteString(
"DEVICE_LOCAL");
6871 json.WriteString(
"Stats");
6872 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
6875 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
6877 if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
6879 json.BeginString(
"Type ");
6880 json.ContinueString(typeIndex);
6885 json.WriteString(
"Flags");
6886 json.BeginArray(
true);
6887 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
6888 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
6890 json.WriteString(
"DEVICE_LOCAL");
6892 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
6894 json.WriteString(
"HOST_VISIBLE");
6896 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
6898 json.WriteString(
"HOST_COHERENT");
6900 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
6902 json.WriteString(
"HOST_CACHED");
6904 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
6906 json.WriteString(
"LAZILY_ALLOCATED");
6912 json.WriteString(
"Stats");
6913 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
6922 if(detailedMap == VK_TRUE)
6924 allocator->PrintDetailedMap(json);
6930 const size_t len = sb.GetLength();
6931 char*
const pChars = vma_new_array(allocator,
char, len + 1);
6934 memcpy(pChars, sb.GetData(), len);
6937 *ppStatsString = pChars;
6941 VmaAllocator allocator,
6944 if(pStatsString != VMA_NULL)
6946 VMA_ASSERT(allocator);
6947 size_t len = strlen(pStatsString);
6948 vma_delete_array(allocator, pStatsString, len + 1);
6952 #endif // #if VMA_STATS_STRING_ENABLED 6957 VmaAllocator allocator,
6958 uint32_t memoryTypeBits,
6960 uint32_t* pMemoryTypeIndex)
6962 VMA_ASSERT(allocator != VK_NULL_HANDLE);
6963 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
6964 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
6966 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
6968 if(preferredFlags == 0)
6970 preferredFlags = requiredFlags;
6973 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
6976 switch(pAllocationCreateInfo->
usage)
6981 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
6984 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
6987 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6988 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
6991 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6992 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
7000 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
7003 *pMemoryTypeIndex = UINT32_MAX;
7004 uint32_t minCost = UINT32_MAX;
7005 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
7006 memTypeIndex < allocator->GetMemoryTypeCount();
7007 ++memTypeIndex, memTypeBit <<= 1)
7010 if((memTypeBit & memoryTypeBits) != 0)
7012 const VkMemoryPropertyFlags currFlags =
7013 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
7015 if((requiredFlags & ~currFlags) == 0)
7018 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
7020 if(currCost < minCost)
7022 *pMemoryTypeIndex = memTypeIndex;
7032 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
7036 VmaAllocator allocator,
7040 VMA_ASSERT(allocator && pCreateInfo && pPool);
7042 VMA_DEBUG_LOG(
"vmaCreatePool");
7044 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7046 return allocator->CreatePool(pCreateInfo, pPool);
7050 VmaAllocator allocator,
7053 VMA_ASSERT(allocator && pool);
7055 VMA_DEBUG_LOG(
"vmaDestroyPool");
7057 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7059 allocator->DestroyPool(pool);
7063 VmaAllocator allocator,
7067 VMA_ASSERT(allocator && pool && pPoolStats);
7069 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7071 allocator->GetPoolStats(pool, pPoolStats);
7075 VmaAllocator allocator,
7077 size_t* pLostAllocationCount)
7079 VMA_ASSERT(allocator && pool);
7081 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7083 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
7087 VmaAllocator allocator,
7088 const VkMemoryRequirements* pVkMemoryRequirements,
7090 VmaAllocation* pAllocation,
7093 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
7095 VMA_DEBUG_LOG(
"vmaAllocateMemory");
7097 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7099 VkResult result = allocator->AllocateMemory(
7100 *pVkMemoryRequirements,
7102 VMA_SUBALLOCATION_TYPE_UNKNOWN,
7105 if(pAllocationInfo && result == VK_SUCCESS)
7107 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7114 VmaAllocator allocator,
7117 VmaAllocation* pAllocation,
7120 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7122 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
7124 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7126 VkMemoryRequirements vkMemReq = {};
7127 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
7129 VkResult result = allocator->AllocateMemory(
7132 VMA_SUBALLOCATION_TYPE_BUFFER,
7135 if(pAllocationInfo && result == VK_SUCCESS)
7137 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7144 VmaAllocator allocator,
7147 VmaAllocation* pAllocation,
7150 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
7152 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
7154 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7156 VkResult result = AllocateMemoryForImage(
7160 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
7163 if(pAllocationInfo && result == VK_SUCCESS)
7165 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7172 VmaAllocator allocator,
7173 VmaAllocation allocation)
7175 VMA_ASSERT(allocator && allocation);
7177 VMA_DEBUG_LOG(
"vmaFreeMemory");
7179 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7181 allocator->FreeMemory(allocation);
7185 VmaAllocator allocator,
7186 VmaAllocation allocation,
7189 VMA_ASSERT(allocator && allocation && pAllocationInfo);
7191 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7193 allocator->GetAllocationInfo(allocation, pAllocationInfo);
7197 VmaAllocator allocator,
7198 VmaAllocation allocation,
7201 VMA_ASSERT(allocator && allocation);
7203 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7205 allocation->SetUserData(pUserData);
7209 VmaAllocator allocator,
7210 VmaAllocation* pAllocation)
7212 VMA_ASSERT(allocator && pAllocation);
7214 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
7216 allocator->CreateLostAllocation(pAllocation);
7220 VmaAllocator allocator,
7221 VmaAllocation allocation,
7224 VMA_ASSERT(allocator && allocation && ppData);
7226 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7228 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
7229 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
7233 VmaAllocator allocator,
7234 VmaAllocation allocation)
7236 VMA_ASSERT(allocator && allocation);
7238 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7240 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
7245 VMA_ASSERT(allocator);
7247 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7249 allocator->UnmapPersistentlyMappedMemory();
7254 VMA_ASSERT(allocator);
7256 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7258 return allocator->MapPersistentlyMappedMemory();
7262 VmaAllocator allocator,
7263 VmaAllocation* pAllocations,
7264 size_t allocationCount,
7265 VkBool32* pAllocationsChanged,
7269 VMA_ASSERT(allocator && pAllocations);
7271 VMA_DEBUG_LOG(
"vmaDefragment");
7273 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7275 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
7279 VmaAllocator allocator,
7280 const VkBufferCreateInfo* pBufferCreateInfo,
7283 VmaAllocation* pAllocation,
7286 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
7288 VMA_DEBUG_LOG(
"vmaCreateBuffer");
7290 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7292 *pBuffer = VK_NULL_HANDLE;
7293 *pAllocation = VK_NULL_HANDLE;
7296 VkResult res = vkCreateBuffer(allocator->m_hDevice, pBufferCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
7300 VkMemoryRequirements vkMemReq = {};
7301 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
7304 res = allocator->AllocateMemory(
7306 *pAllocationCreateInfo,
7307 VMA_SUBALLOCATION_TYPE_BUFFER,
7312 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
7316 if(pAllocationInfo != VMA_NULL)
7318 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7322 allocator->FreeMemory(*pAllocation);
7323 *pAllocation = VK_NULL_HANDLE;
7326 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
7327 *pBuffer = VK_NULL_HANDLE;
7334 VmaAllocator allocator,
7336 VmaAllocation allocation)
7338 if(buffer != VK_NULL_HANDLE)
7340 VMA_ASSERT(allocator);
7342 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
7344 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7346 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
7348 allocator->FreeMemory(allocation);
7353 VmaAllocator allocator,
7354 const VkImageCreateInfo* pImageCreateInfo,
7357 VmaAllocation* pAllocation,
7360 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
7362 VMA_DEBUG_LOG(
"vmaCreateImage");
7364 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7366 *pImage = VK_NULL_HANDLE;
7367 *pAllocation = VK_NULL_HANDLE;
7370 VkResult res = vkCreateImage(allocator->m_hDevice, pImageCreateInfo, allocator->GetAllocationCallbacks(), pImage);
7373 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
7374 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
7375 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
7378 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
7382 res = vkBindImageMemory(allocator->m_hDevice, *pImage, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
7386 if(pAllocationInfo != VMA_NULL)
7388 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
7392 allocator->FreeMemory(*pAllocation);
7393 *pAllocation = VK_NULL_HANDLE;
7396 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
7397 *pImage = VK_NULL_HANDLE;
7404 VmaAllocator allocator,
7406 VmaAllocation allocation)
7408 if(image != VK_NULL_HANDLE)
7410 VMA_ASSERT(allocator);
7412 VMA_DEBUG_LOG(
"vmaDestroyImage");
7414 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7416 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
7418 allocator->FreeMemory(allocation);
7422 #endif // #ifdef VMA_IMPLEMENTATION VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:457
Definition: vk_mem_alloc.h:756
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
uint32_t BlockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:540
Memory will be used for frequent writing on device and readback on host (download).
Definition: vk_mem_alloc.h:607
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:877
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1027
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:808
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:656
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:689
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:420
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:469
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:758
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:451
VkDeviceSize preferredSmallHeapBlockSize
Size of a single memory block to allocate for resources from a small heap <= 512 MB.
Definition: vk_mem_alloc.h:466
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:448
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1031
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:486
VmaStatInfo total
Definition: vk_mem_alloc.h:558
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1039
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:672
VkDeviceSize maxBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1022
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:460
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:762
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:887
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:691
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:778
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:814
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:765
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
Definition: vk_mem_alloc.h:665
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1017
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VkDeviceSize AllocationSizeMax
Definition: vk_mem_alloc.h:549
Definition: vk_mem_alloc.h:736
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1035
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:554
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:645
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1037
VmaMemoryUsage
Definition: vk_mem_alloc.h:593
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:683
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:444
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:439
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:537
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:773
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:431
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:435
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:768
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:550
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:414
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:678
Definition: vk_mem_alloc.h:669
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:786
VkDeviceSize AllocationSizeMin
Definition: vk_mem_alloc.h:549
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:472
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:817
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:696
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:556
VkDeviceSize AllocationSizeAvg
Definition: vk_mem_alloc.h:549
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
uint32_t AllocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:542
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:433
Definition: vk_mem_alloc.h:663
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:800
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:454
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkDeviceSize UsedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:546
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:898
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:624
VkDeviceSize preferredLargeHeapBlockSize
Size of a single memory block to allocate for resources.
Definition: vk_mem_alloc.h:463
uint32_t UnusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:544
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:805
Memory will be mapped on host. Could be used for transfer to/from device.
Definition: vk_mem_alloc.h:601
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:550
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:882
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1033
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:667
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:727
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:893
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
No intended memory usage specified.
Definition: vk_mem_alloc.h:596
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
Definition: vk_mem_alloc.h:608
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:863
Memory will be used for frequent (dynamic) updates from host and reads on device (upload).
Definition: vk_mem_alloc.h:604
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:612
Definition: vk_mem_alloc.h:446
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:635
Memory will be used on device only, so faster access from the device is preferred. No need to be mappable on host.
Definition: vk_mem_alloc.h:598
struct VmaStatInfo VmaStatInfo
VkDeviceSize UnusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:548
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:557
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:811
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:754
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:550
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:868
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.