23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 161 #include <vulkan/vulkan.h> 168 VK_DEFINE_HANDLE(VmaAllocator)
172 VmaAllocator allocator,
174 VkDeviceMemory memory,
178 VmaAllocator allocator,
180 VkDeviceMemory memory,
235 VmaAllocator* pAllocator);
239 VmaAllocator allocator);
246 VmaAllocator allocator,
247 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
254 VmaAllocator allocator,
255 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
264 VmaAllocator allocator,
265 uint32_t memoryTypeIndex,
266 VkMemoryPropertyFlags* pFlags);
289 VmaAllocator allocator,
292 #define VMA_STATS_STRING_ENABLED 1 294 #if VMA_STATS_STRING_ENABLED 300 VmaAllocator allocator,
301 char** ppStatsString,
302 VkBool32 detailedMap);
305 VmaAllocator allocator,
308 #endif // #if VMA_STATS_STRING_ENABLED 403 VmaAllocator allocator,
404 uint32_t memoryTypeBits,
406 uint32_t* pMemoryTypeIndex);
415 VK_DEFINE_HANDLE(VmaAllocation)
466 VmaAllocator allocator,
467 const VkMemoryRequirements* pVkMemoryRequirements,
469 VmaAllocation* pAllocation,
479 VmaAllocator allocator,
482 VmaAllocation* pAllocation,
487 VmaAllocator allocator,
490 VmaAllocation* pAllocation,
495 VmaAllocator allocator,
496 VmaAllocation allocation);
500 VmaAllocator allocator,
501 VmaAllocation allocation,
506 VmaAllocator allocator,
507 VmaAllocation allocation,
519 VmaAllocator allocator,
520 VmaAllocation allocation,
524 VmaAllocator allocator,
525 VmaAllocation allocation);
653 VmaAllocator allocator,
654 VmaAllocation* pAllocations,
655 size_t allocationCount,
656 VkBool32* pAllocationsChanged,
686 VmaAllocator allocator,
687 const VkBufferCreateInfo* pCreateInfo,
690 VmaAllocation* pAllocation,
694 VmaAllocator allocator,
696 VmaAllocation allocation);
700 VmaAllocator allocator,
701 const VkImageCreateInfo* pCreateInfo,
704 VmaAllocation* pAllocation,
708 VmaAllocator allocator,
710 VmaAllocation allocation);
714 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 716 #ifdef VMA_IMPLEMENTATION 717 #undef VMA_IMPLEMENTATION 738 #if VMA_USE_STL_CONTAINERS 739 #define VMA_USE_STL_VECTOR 1 740 #define VMA_USE_STL_UNORDERED_MAP 1 741 #define VMA_USE_STL_LIST 1 744 #if VMA_USE_STL_VECTOR 748 #if VMA_USE_STL_UNORDERED_MAP 749 #include <unordered_map> 771 #define VMA_ASSERT(expr) assert(expr) 773 #define VMA_ASSERT(expr) 779 #ifndef VMA_HEAVY_ASSERT 781 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 783 #define VMA_HEAVY_ASSERT(expr) 789 #define VMA_NULL nullptr 793 #define VMA_ALIGN_OF(type) (__alignof(type)) 796 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 798 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 800 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 804 #ifndef VMA_SYSTEM_FREE 806 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 808 #define VMA_SYSTEM_FREE(ptr) free(ptr) 813 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 817 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 821 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 825 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 828 #ifndef VMA_DEBUG_LOG 829 #define VMA_DEBUG_LOG(format, ...) 839 #if VMA_STATS_STRING_ENABLED 840 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
842 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
844 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
846 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
// Minimal mutex wrapper used through the VMA_MUTEX macro, so users can
// substitute their own synchronization primitive. The extraction lost the
// class wrapper and the std::mutex member; restored here.
class VmaMutex
{
public:
    VmaMutex() { }
    ~VmaMutex() { }
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
861 #define VMA_MUTEX VmaMutex 877 #define VMA_BEST_FIT (1) 880 #ifndef VMA_DEBUG_ALWAYS_OWN_MEMORY 885 #define VMA_DEBUG_ALWAYS_OWN_MEMORY (0) 888 #ifndef VMA_DEBUG_ALIGNMENT 893 #define VMA_DEBUG_ALIGNMENT (1) 896 #ifndef VMA_DEBUG_MARGIN 901 #define VMA_DEBUG_MARGIN (0) 904 #ifndef VMA_DEBUG_GLOBAL_MUTEX 909 #define VMA_DEBUG_GLOBAL_MUTEX (0) 912 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 917 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 920 #ifndef VMA_SMALL_HEAP_MAX_SIZE 921 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 925 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 926 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 930 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 931 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 939 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
940 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count), using the
// classic parallel bit-summing algorithm.
static inline uint32_t CountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    // The final accumulated count was computed but never returned — the
    // function fell off the end.
    return c;
}
// Rounds `val` up to the nearest multiple of `align`.
// Valid for any positive `align`, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T multiples = (val + align - 1) / align;
    return multiples * align;
}
// Division of x by y with mathematical rounding to the nearest integer.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition used by VmaQuickSort: the last element of [beg, end)
// is the pivot. Elements for which cmp(elem, pivot) holds are moved to the
// front; returns an iterator to the pivot's final position.
// Restored from the mangled source: the `++insertIndex` advance and the final
// `return insertIndex;` were missing, which breaks the partition entirely.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator i = beg; i < centerValue; ++i)
    {
        if(cmp(*i, *centerValue))
        {
            if(insertIndex != i)
            {
                VMA_SWAP(*i, *insertIndex);
            }
            // Grow the "less than pivot" prefix.
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
993 template<
typename Iterator,
typename Compare>
994 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
998 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
999 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1000 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1004 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1006 #endif // #ifndef VMA_SORT 1015 static inline bool VmaBlocksOnSamePage(
1016 VkDeviceSize resourceAOffset,
1017 VkDeviceSize resourceASize,
1018 VkDeviceSize resourceBOffset,
1019 VkDeviceSize pageSize)
1021 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1022 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1023 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1024 VkDeviceSize resourceBStart = resourceBOffset;
1025 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1026 return resourceAEndPage == resourceBStartPage;
// Categorizes a suballocation by the kind of resource bound to it, so that
// bufferImageGranularity conflicts between neighboring suballocations can be
// detected (see VmaIsBufferImageGranularityConflict).
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1046 static inline bool VmaIsBufferImageGranularityConflict(
1047 VmaSuballocationType suballocType1,
1048 VmaSuballocationType suballocType2)
1050 if(suballocType1 > suballocType2)
1052 VMA_SWAP(suballocType1, suballocType2);
1055 switch(suballocType1)
1057 case VMA_SUBALLOCATION_TYPE_FREE:
1059 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
1061 case VMA_SUBALLOCATION_TYPE_BUFFER:
1063 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1064 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1065 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
1067 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
1068 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
1069 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1070 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
1072 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
1073 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
1085 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
1086 m_pMutex(useMutex ? &mutex : VMA_NULL)
1103 VMA_MUTEX* m_pMutex;
1106 #if VMA_DEBUG_GLOBAL_MUTEX 1107 static VMA_MUTEX gDebugGlobalMutex;
1108 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex); 1110 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 1114 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search: returns an iterator to the first element of [beg, end) that
// is NOT less than `key` according to `cmp` (std::lower_bound semantics).
// The range must already be sorted with respect to `cmp`. The mangled source
// lost the loop structure and return; restored here.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
1147 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
1149 if((pAllocationCallbacks != VMA_NULL) &&
1150 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
1152 return (*pAllocationCallbacks->pfnAllocation)(
1153 pAllocationCallbacks->pUserData,
1156 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
1160 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
1164 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
1166 if((pAllocationCallbacks != VMA_NULL) &&
1167 (pAllocationCallbacks->pfnFree != VMA_NULL))
1169 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
1173 VMA_SYSTEM_FREE(ptr);
1177 template<
typename T>
1178 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
1180 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
1183 template<
typename T>
1184 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
1186 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
1189 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 1191 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 1193 template<
typename T>
1194 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
1197 VmaFree(pAllocationCallbacks, ptr);
1200 template<
typename T>
1201 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
1205 for(
size_t i = count; i--; )
1209 VmaFree(pAllocationCallbacks, ptr);
1214 template<
typename T>
1215 class VmaStlAllocator
1218 const VkAllocationCallbacks*
const m_pCallbacks;
1219 typedef T value_type;
1221 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
1222 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
1224 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
1225 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
1227 template<
typename U>
1228 bool operator==(
const VmaStlAllocator<U>& rhs)
const 1230 return m_pCallbacks == rhs.m_pCallbacks;
1232 template<
typename U>
1233 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 1235 return m_pCallbacks != rhs.m_pCallbacks;
1238 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
1241 #if VMA_USE_STL_VECTOR 1243 #define VmaVector std::vector 1245 template<
typename T,
typename allocatorT>
1246 static void VectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
1248 vec.insert(vec.begin() + index, item);
// Erases the element of `vec` at position `index`.
template<typename T, typename allocatorT>
static void VectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const auto erasePos = vec.begin() + index;
    vec.erase(erasePos);
}
1257 #else // #if VMA_USE_STL_VECTOR 1262 template<
typename T,
typename AllocatorT>
1266 VmaVector(
const AllocatorT& allocator) :
1267 m_Allocator(allocator),
1274 VmaVector(
size_t count,
const AllocatorT& allocator) :
1275 m_Allocator(allocator),
1276 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator->m_pCallbacks, count) : VMA_NULL),
1282 VmaVector(
const VmaVector<T, AllocatorT>& src) :
1283 m_Allocator(src.m_Allocator),
1284 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src->m_pCallbacks, src.m_Count) : VMA_NULL),
1285 m_Count(src.m_Count),
1286 m_Capacity(src.m_Count)
1290 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
1296 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1299 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
1303 Resize(rhs.m_Count);
1306 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
1312 bool empty()
const {
return m_Count == 0; }
1313 size_t size()
const {
return m_Count; }
1314 T* data() {
return m_pArray; }
1315 const T* data()
const {
return m_pArray; }
1317 T& operator[](
size_t index)
1319 VMA_HEAVY_ASSERT(index < m_Count);
1320 return m_pArray[index];
1322 const T& operator[](
size_t index)
const 1324 VMA_HEAVY_ASSERT(index < m_Count);
1325 return m_pArray[index];
1330 VMA_HEAVY_ASSERT(m_Count > 0);
1333 const T& front()
const 1335 VMA_HEAVY_ASSERT(m_Count > 0);
1340 VMA_HEAVY_ASSERT(m_Count > 0);
1341 return m_pArray[m_Count - 1];
1343 const T& back()
const 1345 VMA_HEAVY_ASSERT(m_Count > 0);
1346 return m_pArray[m_Count - 1];
1349 void reserve(
size_t newCapacity,
bool freeMemory =
false)
1351 newCapacity = VMA_MAX(newCapacity, m_Count);
1353 if((newCapacity < m_Capacity) && !freeMemory)
1355 newCapacity = m_Capacity;
1358 if(newCapacity != m_Capacity)
1360 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
1363 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
1365 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1366 m_Capacity = newCapacity;
1367 m_pArray = newArray;
1371 void resize(
size_t newCount,
bool freeMemory =
false)
1373 size_t newCapacity = m_Capacity;
1374 if(newCount > m_Capacity)
1376 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
1380 newCapacity = newCount;
1383 if(newCapacity != m_Capacity)
1385 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
1386 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
1387 if(elementsToCopy != 0)
1389 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
1391 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
1392 m_Capacity = newCapacity;
1393 m_pArray = newArray;
1399 void clear(
bool freeMemory =
false)
1401 resize(0, freeMemory);
1404 void insert(
size_t index,
const T& src)
1406 VMA_HEAVY_ASSERT(index <= m_Count);
1407 const size_t oldCount = size();
1408 resize(oldCount + 1);
1409 if(index < oldCount)
1411 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
1413 m_pArray[index] = src;
1416 void remove(
size_t index)
1418 VMA_HEAVY_ASSERT(index < m_Count);
1419 const size_t oldCount = size();
1420 if(index < oldCount - 1)
1422 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
1424 resize(oldCount - 1);
1427 void push_back(
const T& src)
1429 const size_t newIndex = size();
1430 resize(newIndex + 1);
1431 m_pArray[newIndex] = src;
1436 VMA_HEAVY_ASSERT(m_Count > 0);
1440 void push_front(
const T& src)
1447 VMA_HEAVY_ASSERT(m_Count > 0);
1451 typedef T* iterator;
1453 iterator begin() {
return m_pArray; }
1454 iterator end() {
return m_pArray + m_Count; }
1457 AllocatorT m_Allocator;
1463 template<
typename T,
typename allocatorT>
1464 static void VectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
1466 vec.insert(index, item);
1469 template<
typename T,
typename allocatorT>
1470 static void VectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
1475 #endif // #if VMA_USE_STL_VECTOR 1485 template<
typename T>
1486 class VmaPoolAllocator
1489 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
1490 ~VmaPoolAllocator();
1498 uint32_t NextFreeIndex;
1505 uint32_t FirstFreeIndex;
1508 const VkAllocationCallbacks* m_pAllocationCallbacks;
1509 size_t m_ItemsPerBlock;
1510 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
1512 ItemBlock& CreateNewBlock();
1515 template<
typename T>
1516 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
1517 m_pAllocationCallbacks(pAllocationCallbacks),
1518 m_ItemsPerBlock(itemsPerBlock),
1519 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
1521 VMA_ASSERT(itemsPerBlock > 0);
1524 template<
typename T>
1525 VmaPoolAllocator<T>::~VmaPoolAllocator()
1530 template<
typename T>
1531 void VmaPoolAllocator<T>::Clear()
1533 for(
size_t i = m_ItemBlocks.size(); i--; )
1534 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
1535 m_ItemBlocks.clear();
1538 template<
typename T>
1539 T* VmaPoolAllocator<T>::Alloc()
1541 for(
size_t i = m_ItemBlocks.size(); i--; )
1543 ItemBlock& block = m_ItemBlocks[i];
1545 if(block.FirstFreeIndex != UINT32_MAX)
1547 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
1548 block.FirstFreeIndex = pItem->NextFreeIndex;
1549 return &pItem->Value;
1554 ItemBlock& newBlock = CreateNewBlock();
1555 Item*
const pItem = &newBlock.pItems[0];
1556 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
1557 return &pItem->Value;
1560 template<
typename T>
1561 void VmaPoolAllocator<T>::Free(T* ptr)
1564 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
1566 ItemBlock& block = m_ItemBlocks[i];
1570 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
1573 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
1575 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
1576 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
1577 block.FirstFreeIndex = index;
1581 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
1584 template<
typename T>
1585 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
1587 ItemBlock newBlock = {
1588 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
1590 m_ItemBlocks.push_back(newBlock);
1593 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
1594 newBlock.pItems[i].NextFreeIndex = i + 1;
1595 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
1596 return m_ItemBlocks.back();
1602 #if VMA_USE_STL_LIST 1604 #define VmaList std::list 1606 #else // #if VMA_USE_STL_LIST 1608 template<
typename T>
1617 template<
typename T>
1621 typedef VmaListItem<T> ItemType;
1623 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
1627 size_t GetCount()
const {
return m_Count; }
1628 bool IsEmpty()
const {
return m_Count == 0; }
1630 ItemType* Front() {
return m_pFront; }
1631 const ItemType* Front()
const {
return m_pFront; }
1632 ItemType* Back() {
return m_pBack; }
1633 const ItemType* Back()
const {
return m_pBack; }
1635 ItemType* PushBack();
1636 ItemType* PushFront();
1637 ItemType* PushBack(
const T& value);
1638 ItemType* PushFront(
const T& value);
1643 ItemType* InsertBefore(ItemType* pItem);
1645 ItemType* InsertAfter(ItemType* pItem);
1647 ItemType* InsertBefore(ItemType* pItem,
const T& value);
1648 ItemType* InsertAfter(ItemType* pItem,
const T& value);
1650 void Remove(ItemType* pItem);
1653 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
1654 VmaPoolAllocator<ItemType> m_ItemAllocator;
1660 VmaRawList(
const VmaRawList<T>& src);
1661 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
1664 template<
typename T>
1665 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
1666 m_pAllocationCallbacks(pAllocationCallbacks),
1667 m_ItemAllocator(pAllocationCallbacks, 128),
1674 template<
typename T>
1675 VmaRawList<T>::~VmaRawList()
1681 template<
typename T>
1682 void VmaRawList<T>::Clear()
1684 if(IsEmpty() ==
false)
1686 ItemType* pItem = m_pBack;
1687 while(pItem != VMA_NULL)
1689 ItemType*
const pPrevItem = pItem->pPrev;
1690 m_ItemAllocator.Free(pItem);
1693 m_pFront = VMA_NULL;
1699 template<
typename T>
1700 VmaListItem<T>* VmaRawList<T>::PushBack()
1702 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1703 pNewItem->pNext = VMA_NULL;
1706 pNewItem->pPrev = VMA_NULL;
1707 m_pFront = pNewItem;
1713 pNewItem->pPrev = m_pBack;
1714 m_pBack->pNext = pNewItem;
1721 template<
typename T>
1722 VmaListItem<T>* VmaRawList<T>::PushFront()
1724 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
1725 pNewItem->pPrev = VMA_NULL;
1728 pNewItem->pNext = VMA_NULL;
1729 m_pFront = pNewItem;
1735 pNewItem->pNext = m_pFront;
1736 m_pFront->pPrev = pNewItem;
1737 m_pFront = pNewItem;
1743 template<
typename T>
1744 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
1746 ItemType*
const pNewItem = PushBack();
1747 pNewItem->Value = value;
1751 template<
typename T>
1752 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
1754 ItemType*
const pNewItem = PushFront();
1755 pNewItem->Value = value;
1759 template<
typename T>
1760 void VmaRawList<T>::PopBack()
1762 VMA_HEAVY_ASSERT(m_Count > 0);
1763 ItemType*
const pBackItem = m_pBack;
1764 ItemType*
const pPrevItem = pBackItem->pPrev;
1765 if(pPrevItem != VMA_NULL)
1767 pPrevItem->pNext = VMA_NULL;
1769 m_pBack = pPrevItem;
1770 m_ItemAllocator.Free(pBackItem);
1774 template<
typename T>
1775 void VmaRawList<T>::PopFront()
1777 VMA_HEAVY_ASSERT(m_Count > 0);
1778 ItemType*
const pFrontItem = m_pFront;
1779 ItemType*
const pNextItem = pFrontItem->pNext;
1780 if(pNextItem != VMA_NULL)
1782 pNextItem->pPrev = VMA_NULL;
1784 m_pFront = pNextItem;
1785 m_ItemAllocator.Free(pFrontItem);
1789 template<
typename T>
1790 void VmaRawList<T>::Remove(ItemType* pItem)
1792 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
1793 VMA_HEAVY_ASSERT(m_Count > 0);
1795 if(pItem->pPrev != VMA_NULL)
1797 pItem->pPrev->pNext = pItem->pNext;
1801 VMA_HEAVY_ASSERT(m_pFront == pItem);
1802 m_pFront = pItem->pNext;
1805 if(pItem->pNext != VMA_NULL)
1807 pItem->pNext->pPrev = pItem->pPrev;
1811 VMA_HEAVY_ASSERT(m_pBack == pItem);
1812 m_pBack = pItem->pPrev;
1815 m_ItemAllocator.Free(pItem);
1819 template<
typename T>
1820 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
1822 if(pItem != VMA_NULL)
1824 ItemType*
const prevItem = pItem->pPrev;
1825 ItemType*
const newItem = m_ItemAllocator.Alloc();
1826 newItem->pPrev = prevItem;
1827 newItem->pNext = pItem;
1828 pItem->pPrev = newItem;
1829 if(prevItem != VMA_NULL)
1831 prevItem->pNext = newItem;
1835 VMA_HEAVY_ASSERT(m_pFront == pItem);
1845 template<
typename T>
1846 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
1848 if(pItem != VMA_NULL)
1850 ItemType*
const nextItem = pItem->pNext;
1851 ItemType*
const newItem = m_ItemAllocator.Alloc();
1852 newItem->pNext = nextItem;
1853 newItem->pPrev = pItem;
1854 pItem->pNext = newItem;
1855 if(nextItem != VMA_NULL)
1857 nextItem->pPrev = newItem;
1861 VMA_HEAVY_ASSERT(m_pBack == pItem);
1871 template<
typename T>
1872 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
1874 ItemType*
const newItem = InsertBefore(pItem);
1875 newItem->Value = value;
1879 template<
typename T>
1880 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
1882 ItemType*
const newItem = InsertAfter(pItem);
1883 newItem->Value = value;
1887 template<
typename T,
typename AllocatorT>
1900 T& operator*()
const 1902 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1903 return m_pItem->Value;
1905 T* operator->()
const 1907 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1908 return &m_pItem->Value;
1911 iterator& operator++()
1913 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1914 m_pItem = m_pItem->pNext;
1917 iterator& operator--()
1919 if(m_pItem != VMA_NULL)
1921 m_pItem = m_pItem->pPrev;
1925 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
1926 m_pItem = m_pList->Back();
1931 iterator operator++(
int)
1933 iterator result = *
this;
1937 iterator operator--(
int)
1939 iterator result = *
this;
1944 bool operator==(
const iterator& rhs)
const 1946 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1947 return m_pItem == rhs.m_pItem;
1949 bool operator!=(
const iterator& rhs)
const 1951 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
1952 return m_pItem != rhs.m_pItem;
1956 VmaRawList<T>* m_pList;
1957 VmaListItem<T>* m_pItem;
1959 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
1965 friend class VmaList<T, AllocatorT>;
1966 friend class VmaList<T, AllocatorT>:: const_iterator;
1969 class const_iterator
1978 const_iterator(
const iterator& src) :
1979 m_pList(src.m_pList),
1980 m_pItem(src.m_pItem)
1984 const T& operator*()
const 1986 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1987 return m_pItem->Value;
1989 const T* operator->()
const 1991 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1992 return &m_pItem->Value;
1995 const_iterator& operator++()
1997 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
1998 m_pItem = m_pItem->pNext;
2001 const_iterator& operator--()
2003 if(m_pItem != VMA_NULL)
2005 m_pItem = m_pItem->pPrev;
2009 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2010 m_pItem = m_pList->Back();
2015 const_iterator operator++(
int)
2017 const_iterator result = *
this;
2021 const_iterator operator--(
int)
2023 const_iterator result = *
this;
2028 bool operator==(
const const_iterator& rhs)
const 2030 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2031 return m_pItem == rhs.m_pItem;
2033 bool operator!=(
const const_iterator& rhs)
const 2035 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2036 return m_pItem != rhs.m_pItem;
2040 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
2046 const VmaRawList<T>* m_pList;
2047 const VmaListItem<T>* m_pItem;
2049 friend class VmaList<T, AllocatorT>;
2052 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
2054 bool empty()
const {
return m_RawList.IsEmpty(); }
2055 size_t size()
const {
return m_RawList.GetCount(); }
2057 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
2058 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
2060 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
2061 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
2063 void clear() { m_RawList.Clear(); }
2064 void push_back(
const T& value) { m_RawList.PushBack(value); }
2065 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
2066 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
2069 VmaRawList<T> m_RawList;
2072 #endif // #if VMA_USE_STL_LIST 2077 #if VMA_USE_STL_UNORDERED_MAP 2079 #define VmaPair std::pair 2081 #define VMA_MAP_TYPE(KeyT, ValueT) \ 2082 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 2084 #else // #if VMA_USE_STL_UNORDERED_MAP 2086 template<
typename T1,
typename T2>
2092 VmaPair() : first(), second() { }
2093 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
2099 template<
typename KeyT,
typename ValueT>
2103 typedef VmaPair<KeyT, ValueT> PairType;
2104 typedef PairType* iterator;
2106 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
2108 iterator begin() {
return m_Vector.begin(); }
2109 iterator end() {
return m_Vector.end(); }
2111 void insert(
const PairType& pair);
2112 iterator find(
const KeyT& key);
2113 void erase(iterator it);
2116 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
2119 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 2121 template<
typename FirstT,
typename SecondT>
2122 struct VmaPairFirstLess
2124 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 2126 return lhs.first < rhs.first;
2128 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 2130 return lhs.first < rhsFirst;
2134 template<
typename KeyT,
typename ValueT>
2135 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
2137 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2139 m_Vector.data() + m_Vector.size(),
2141 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
2142 VectorInsert(m_Vector, indexToInsert, pair);
2145 template<
typename KeyT,
typename ValueT>
2146 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
2148 PairType* it = VmaBinaryFindFirstNotLess(
2150 m_Vector.data() + m_Vector.size(),
2152 VmaPairFirstLess<KeyT, ValueT>());
2153 if((it != m_Vector.end()) && (it->first == key))
2159 return m_Vector.end();
2163 template<
typename KeyT,
typename ValueT>
2164 void VmaMap<KeyT, ValueT>::erase(iterator it)
2166 VectorRemove(m_Vector, it - m_Vector.begin());
2169 #endif // #if VMA_USE_STL_UNORDERED_MAP 2175 enum VMA_BLOCK_VECTOR_TYPE
2177 VMA_BLOCK_VECTOR_TYPE_UNMAPPED,
2178 VMA_BLOCK_VECTOR_TYPE_MAPPED,
2179 VMA_BLOCK_VECTOR_TYPE_COUNT
2185 VMA_BLOCK_VECTOR_TYPE_MAPPED :
2186 VMA_BLOCK_VECTOR_TYPE_UNMAPPED;
2189 struct VmaAllocation_T
2192 enum ALLOCATION_TYPE
2194 ALLOCATION_TYPE_NONE,
2195 ALLOCATION_TYPE_BLOCK,
2196 ALLOCATION_TYPE_OWN,
2201 memset(
this, 0,
sizeof(VmaAllocation_T));
2204 void InitBlockAllocation(
2206 VkDeviceSize offset,
2207 VkDeviceSize alignment,
2209 VmaSuballocationType suballocationType,
2212 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2213 VMA_ASSERT(block != VMA_NULL);
2214 m_Type = ALLOCATION_TYPE_BLOCK;
2215 m_Alignment = alignment;
2217 m_pUserData = pUserData;
2218 m_SuballocationType = suballocationType;
2219 m_BlockAllocation.m_Block = block;
2220 m_BlockAllocation.m_Offset = offset;
2223 void ChangeBlockAllocation(
2225 VkDeviceSize offset)
2227 VMA_ASSERT(block != VMA_NULL);
2228 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2229 m_BlockAllocation.m_Block = block;
2230 m_BlockAllocation.m_Offset = offset;
2233 void InitOwnAllocation(
2234 uint32_t memoryTypeIndex,
2235 VkDeviceMemory hMemory,
2236 VmaSuballocationType suballocationType,
2242 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
2243 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
2244 m_Type = ALLOCATION_TYPE_OWN;
2247 m_pUserData = pUserData;
2248 m_SuballocationType = suballocationType;
2249 m_OwnAllocation.m_MemoryTypeIndex = memoryTypeIndex;
2250 m_OwnAllocation.m_hMemory = hMemory;
2251 m_OwnAllocation.m_PersistentMap = persistentMap;
2252 m_OwnAllocation.m_pMappedData = pMappedData;
2255 ALLOCATION_TYPE GetType()
const {
return m_Type; }
2256 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
2257 VkDeviceSize GetSize()
const {
return m_Size; }
2258 void* GetUserData()
const {
return m_pUserData; }
2259 void SetUserData(
void* pUserData) { m_pUserData = pUserData; }
2260 VmaSuballocationType GetSuballocationType()
const {
return m_SuballocationType; }
2262 VmaBlock* GetBlock()
const 2264 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
2265 return m_BlockAllocation.m_Block;
2267 VkDeviceSize GetOffset()
const 2269 return (m_Type == ALLOCATION_TYPE_BLOCK) ? m_BlockAllocation.m_Offset : 0;
2271 VkDeviceMemory GetMemory()
const;
2272 uint32_t GetMemoryTypeIndex()
const;
2273 VMA_BLOCK_VECTOR_TYPE GetBlockVectorType()
const;
2274 void* GetMappedData()
const;
2276 VkResult OwnAllocMapPersistentlyMappedMemory(VkDevice hDevice)
2278 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2279 if(m_OwnAllocation.m_PersistentMap)
2281 return vkMapMemory(hDevice, m_OwnAllocation.m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_OwnAllocation.m_pMappedData);
2285 void OwnAllocUnmapPersistentlyMappedMemory(VkDevice hDevice)
2287 VMA_ASSERT(m_Type == ALLOCATION_TYPE_OWN);
2288 if(m_OwnAllocation.m_pMappedData)
2290 VMA_ASSERT(m_OwnAllocation.m_PersistentMap);
2291 vkUnmapMemory(hDevice, m_OwnAllocation.m_hMemory);
2292 m_OwnAllocation.m_pMappedData = VMA_NULL;
2297 VkDeviceSize m_Alignment;
2298 VkDeviceSize m_Size;
2300 ALLOCATION_TYPE m_Type;
2301 VmaSuballocationType m_SuballocationType;
2304 struct BlockAllocation
2307 VkDeviceSize m_Offset;
2311 struct OwnAllocation
2313 uint32_t m_MemoryTypeIndex;
2314 VkDeviceMemory m_hMemory;
2315 bool m_PersistentMap;
2316 void* m_pMappedData;
2322 BlockAllocation m_BlockAllocation;
2324 OwnAllocation m_OwnAllocation;
2332 struct VmaSuballocation
2334 VkDeviceSize offset;
2336 VmaSuballocationType type;
2339 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
2342 struct VmaAllocationRequest
2344 VmaSuballocationList::iterator freeSuballocationItem;
2345 VkDeviceSize offset;
2353 uint32_t m_MemoryTypeIndex;
2354 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
2355 VkDeviceMemory m_hMemory;
2356 VkDeviceSize m_Size;
2357 bool m_PersistentMap;
2358 void* m_pMappedData;
2359 uint32_t m_FreeCount;
2360 VkDeviceSize m_SumFreeSize;
2361 VmaSuballocationList m_Suballocations;
2364 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
2366 VmaBlock(VmaAllocator hAllocator);
2370 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2375 uint32_t newMemoryTypeIndex,
2376 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
2377 VkDeviceMemory newMemory,
2378 VkDeviceSize newSize,
2382 void Destroy(VmaAllocator allocator);
2385 bool Validate()
const;
2390 bool CreateAllocationRequest(
2391 VkDeviceSize bufferImageGranularity,
2392 VkDeviceSize allocSize,
2393 VkDeviceSize allocAlignment,
2394 VmaSuballocationType allocType,
2395 VmaAllocationRequest* pAllocationRequest);
2399 bool CheckAllocation(
2400 VkDeviceSize bufferImageGranularity,
2401 VkDeviceSize allocSize,
2402 VkDeviceSize allocAlignment,
2403 VmaSuballocationType allocType,
2404 VmaSuballocationList::const_iterator freeSuballocItem,
2405 VkDeviceSize* pOffset)
const;
2408 bool IsEmpty()
const;
2413 const VmaAllocationRequest& request,
2414 VmaSuballocationType type,
2415 VkDeviceSize allocSize);
2418 void Free(
const VmaAllocation allocation);
2420 #if VMA_STATS_STRING_ENABLED 2421 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
2426 void MergeFreeWithNext(VmaSuballocationList::iterator item);
2429 void FreeSuballocation(VmaSuballocationList::iterator suballocItem);
2432 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
2435 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
2438 struct VmaPointerLess
2440 bool operator()(
const void* lhs,
const void* rhs)
const 2448 struct VmaBlockVector
2451 VmaVector< VmaBlock*, VmaStlAllocator<VmaBlock*> > m_Blocks;
2453 VmaBlockVector(VmaAllocator hAllocator);
2456 bool IsEmpty()
const {
return m_Blocks.empty(); }
2459 void Remove(VmaBlock* pBlock);
2463 void IncrementallySortBlocks();
2466 void AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const;
2468 #if VMA_STATS_STRING_ENABLED 2469 void PrintDetailedMap(
class VmaStringBuilder& sb)
const;
2472 void UnmapPersistentlyMappedMemory();
2473 VkResult MapPersistentlyMappedMemory();
2476 VmaAllocator m_hAllocator;
2480 struct VmaAllocator_T
2484 bool m_AllocationCallbacksSpecified;
2485 VkAllocationCallbacks m_AllocationCallbacks;
2487 VkDeviceSize m_PreferredLargeHeapBlockSize;
2488 VkDeviceSize m_PreferredSmallHeapBlockSize;
2491 uint32_t m_UnmapPersistentlyMappedMemoryCounter;
2493 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
2494 VkPhysicalDeviceMemoryProperties m_MemProps;
2496 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
2500 bool m_HasEmptyBlock[VK_MAX_MEMORY_TYPES];
2501 VMA_MUTEX m_BlocksMutex[VK_MAX_MEMORY_TYPES];
2504 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
2505 AllocationVectorType* m_pOwnAllocations[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
2506 VMA_MUTEX m_OwnAllocationsMutex[VK_MAX_MEMORY_TYPES];
2511 const VkAllocationCallbacks* GetAllocationCallbacks()
const 2513 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
2516 VkDeviceSize GetPreferredBlockSize(uint32_t memTypeIndex)
const;
2518 VkDeviceSize GetBufferImageGranularity()
const 2521 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
2522 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
2525 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
2526 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
2529 VkResult AllocateMemory(
2530 const VkMemoryRequirements& vkMemReq,
2532 VmaSuballocationType suballocType,
2533 VmaAllocation* pAllocation);
2536 void FreeMemory(
const VmaAllocation allocation);
2538 void CalculateStats(
VmaStats* pStats);
2540 #if VMA_STATS_STRING_ENABLED 2541 void PrintDetailedMap(
class VmaStringBuilder& sb);
2544 void UnmapPersistentlyMappedMemory();
2545 VkResult MapPersistentlyMappedMemory();
2547 VkResult Defragment(
2548 VmaAllocation* pAllocations,
2549 size_t allocationCount,
2550 VkBool32* pAllocationsChanged,
2554 static void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
2557 VkPhysicalDevice m_PhysicalDevice;
2559 VkResult AllocateMemoryOfType(
2560 const VkMemoryRequirements& vkMemReq,
2562 uint32_t memTypeIndex,
2563 VmaSuballocationType suballocType,
2564 VmaAllocation* pAllocation);
2567 VkResult AllocateOwnMemory(
2569 VmaSuballocationType suballocType,
2570 uint32_t memTypeIndex,
2573 VmaAllocation* pAllocation);
2576 void FreeOwnMemory(VmaAllocation allocation);
2582 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
2584 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
2587 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
2589 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
2592 template<
typename T>
2593 static T* VmaAllocate(VmaAllocator hAllocator)
2595 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
2598 template<
typename T>
2599 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
2601 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
2604 template<
typename T>
2605 static void vma_delete(VmaAllocator hAllocator, T* ptr)
2610 VmaFree(hAllocator, ptr);
2614 template<
typename T>
2615 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
2619 for(
size_t i = count; i--; )
2621 VmaFree(hAllocator, ptr);
2628 #if VMA_STATS_STRING_ENABLED 2630 class VmaStringBuilder
2633 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
2634 size_t GetLength()
const {
return m_Data.size(); }
2635 const char* GetData()
const {
return m_Data.data(); }
2637 void Add(
char ch) { m_Data.push_back(ch); }
2638 void Add(
const char* pStr);
2639 void AddNewLine() { Add(
'\n'); }
2640 void AddNumber(uint32_t num);
2641 void AddNumber(uint64_t num);
2642 void AddBool(
bool b) { Add(b ?
"true" :
"false"); }
2643 void AddNull() { Add(
"null"); }
2644 void AddString(
const char* pStr);
2647 VmaVector< char, VmaStlAllocator<char> > m_Data;
2650 void VmaStringBuilder::Add(
const char* pStr)
2652 const size_t strLen = strlen(pStr);
2655 const size_t oldCount = m_Data.size();
2656 m_Data.resize(oldCount + strLen);
2657 memcpy(m_Data.data() + oldCount, pStr, strLen);
2661 void VmaStringBuilder::AddNumber(uint32_t num)
2664 VmaUint32ToStr(buf,
sizeof(buf), num);
2668 void VmaStringBuilder::AddNumber(uint64_t num)
2671 VmaUint64ToStr(buf,
sizeof(buf), num);
2675 void VmaStringBuilder::AddString(
const char* pStr)
2678 const size_t strLen = strlen(pStr);
2679 for(
size_t i = 0; i < strLen; ++i)
2706 VMA_ASSERT(0 &&
"Character not currently supported.");
2715 VkDeviceMemory VmaAllocation_T::GetMemory()
const 2717 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2718 m_BlockAllocation.m_Block->m_hMemory : m_OwnAllocation.m_hMemory;
2721 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 2723 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2724 m_BlockAllocation.m_Block->m_MemoryTypeIndex : m_OwnAllocation.m_MemoryTypeIndex;
2727 VMA_BLOCK_VECTOR_TYPE VmaAllocation_T::GetBlockVectorType()
const 2729 return (m_Type == ALLOCATION_TYPE_BLOCK) ?
2730 m_BlockAllocation.m_Block->m_BlockVectorType :
2731 (m_OwnAllocation.m_PersistentMap ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED);
2734 void* VmaAllocation_T::GetMappedData()
const 2738 case ALLOCATION_TYPE_BLOCK:
2739 if(m_BlockAllocation.m_Block->m_pMappedData != VMA_NULL)
2741 return (
char*)m_BlockAllocation.m_Block->m_pMappedData + m_BlockAllocation.m_Offset;
2748 case ALLOCATION_TYPE_OWN:
2749 return m_OwnAllocation.m_pMappedData;
2757 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
2766 static void VmaPrintStatInfo(VmaStringBuilder& sb,
const VmaStatInfo& stat)
2768 sb.Add(
"{ \"Allocations\": ");
2770 sb.Add(
", \"Suballocations\": ");
2772 sb.Add(
", \"UnusedRanges\": ");
2774 sb.Add(
", \"UsedBytes\": ");
2776 sb.Add(
", \"UnusedBytes\": ");
2778 sb.Add(
", \"SuballocationSize\": { \"Min\": ");
2780 sb.Add(
", \"Avg\": ");
2782 sb.Add(
", \"Max\": ");
2784 sb.Add(
" }, \"UnusedRangeSize\": { \"Min\": ");
2786 sb.Add(
", \"Avg\": ");
2788 sb.Add(
", \"Max\": ");
2793 #endif // #if VMA_STATS_STRING_ENABLED 2795 struct VmaSuballocationItemSizeLess
2798 const VmaSuballocationList::iterator lhs,
2799 const VmaSuballocationList::iterator rhs)
const 2801 return lhs->size < rhs->size;
2804 const VmaSuballocationList::iterator lhs,
2805 VkDeviceSize rhsSize)
const 2807 return lhs->size < rhsSize;
2811 VmaBlock::VmaBlock(VmaAllocator hAllocator) :
2812 m_MemoryTypeIndex(UINT32_MAX),
2813 m_BlockVectorType(VMA_BLOCK_VECTOR_TYPE_COUNT),
2814 m_hMemory(VK_NULL_HANDLE),
2816 m_PersistentMap(false),
2817 m_pMappedData(VMA_NULL),
2820 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
2821 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
2825 void VmaBlock::Init(
2826 uint32_t newMemoryTypeIndex,
2827 VMA_BLOCK_VECTOR_TYPE newBlockVectorType,
2828 VkDeviceMemory newMemory,
2829 VkDeviceSize newSize,
2833 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
2835 m_MemoryTypeIndex = newMemoryTypeIndex;
2836 m_BlockVectorType = newBlockVectorType;
2837 m_hMemory = newMemory;
2839 m_PersistentMap = persistentMap;
2840 m_pMappedData = pMappedData;
2842 m_SumFreeSize = newSize;
2844 m_Suballocations.clear();
2845 m_FreeSuballocationsBySize.clear();
2847 VmaSuballocation suballoc = {};
2848 suballoc.offset = 0;
2849 suballoc.size = newSize;
2850 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
2852 m_Suballocations.push_back(suballoc);
2853 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
2855 m_FreeSuballocationsBySize.push_back(suballocItem);
2858 void VmaBlock::Destroy(VmaAllocator allocator)
2860 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
2861 if(m_pMappedData != VMA_NULL)
2863 vkUnmapMemory(allocator->m_hDevice, m_hMemory);
2864 m_pMappedData = VMA_NULL;
2868 if(allocator->m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
2870 (*allocator->m_DeviceMemoryCallbacks.pfnFree)(allocator, m_MemoryTypeIndex, m_hMemory, m_Size);
2873 vkFreeMemory(allocator->m_hDevice, m_hMemory, allocator->GetAllocationCallbacks());
2874 m_hMemory = VK_NULL_HANDLE;
2877 bool VmaBlock::Validate()
const 2879 if((m_hMemory == VK_NULL_HANDLE) ||
2881 m_Suballocations.empty())
2887 VkDeviceSize calculatedOffset = 0;
2889 uint32_t calculatedFreeCount = 0;
2891 VkDeviceSize calculatedSumFreeSize = 0;
2894 size_t freeSuballocationsToRegister = 0;
2896 bool prevFree =
false;
2898 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
2899 suballocItem != m_Suballocations.cend();
2902 const VmaSuballocation& subAlloc = *suballocItem;
2905 if(subAlloc.offset != calculatedOffset)
2910 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
2912 if(prevFree && currFree)
2916 prevFree = currFree;
2920 calculatedSumFreeSize += subAlloc.size;
2921 ++calculatedFreeCount;
2922 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
2924 ++freeSuballocationsToRegister;
2928 calculatedOffset += subAlloc.size;
2933 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
2938 VkDeviceSize lastSize = 0;
2939 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
2941 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
2944 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
2949 if(suballocItem->size < lastSize)
2954 lastSize = suballocItem->size;
2959 (calculatedOffset == m_Size) &&
2960 (calculatedSumFreeSize == m_SumFreeSize) &&
2961 (calculatedFreeCount == m_FreeCount);
2974 bool VmaBlock::CreateAllocationRequest(
2975 VkDeviceSize bufferImageGranularity,
2976 VkDeviceSize allocSize,
2977 VkDeviceSize allocAlignment,
2978 VmaSuballocationType allocType,
2979 VmaAllocationRequest* pAllocationRequest)
2981 VMA_ASSERT(allocSize > 0);
2982 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
2983 VMA_ASSERT(pAllocationRequest != VMA_NULL);
2984 VMA_HEAVY_ASSERT(Validate());
2987 if(m_SumFreeSize < allocSize)
3022 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
3023 if(freeSuballocCount > 0)
3028 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3029 m_FreeSuballocationsBySize.data(),
3030 m_FreeSuballocationsBySize.data() + freeSuballocCount,
3032 VmaSuballocationItemSizeLess());
3033 size_t index = it - m_FreeSuballocationsBySize.data();
3034 for(; index < freeSuballocCount; ++index)
3036 VkDeviceSize offset = 0;
3037 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
3038 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
3040 pAllocationRequest->freeSuballocationItem = suballocItem;
3041 pAllocationRequest->offset = offset;
3049 for(
size_t index = freeSuballocCount; index--; )
3051 VkDeviceSize offset = 0;
3052 const VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[index];
3053 if(CheckAllocation(bufferImageGranularity, allocSize, allocAlignment, allocType, suballocItem, &offset))
3055 pAllocationRequest->freeSuballocationItem = suballocItem;
3056 pAllocationRequest->offset = offset;
3066 bool VmaBlock::CheckAllocation(
3067 VkDeviceSize bufferImageGranularity,
3068 VkDeviceSize allocSize,
3069 VkDeviceSize allocAlignment,
3070 VmaSuballocationType allocType,
3071 VmaSuballocationList::const_iterator freeSuballocItem,
3072 VkDeviceSize* pOffset)
const 3074 VMA_ASSERT(allocSize > 0);
3075 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
3076 VMA_ASSERT(freeSuballocItem != m_Suballocations.cend());
3077 VMA_ASSERT(pOffset != VMA_NULL);
3079 const VmaSuballocation& suballoc = *freeSuballocItem;
3080 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
3083 if(suballoc.size < allocSize)
3089 *pOffset = suballoc.offset;
3092 if((VMA_DEBUG_MARGIN > 0) && freeSuballocItem != m_Suballocations.cbegin())
3094 *pOffset += VMA_DEBUG_MARGIN;
3098 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
3099 *pOffset = VmaAlignUp(*pOffset, alignment);
3103 if(bufferImageGranularity > 1)
3105 bool bufferImageGranularityConflict =
false;
3106 VmaSuballocationList::const_iterator prevSuballocItem = freeSuballocItem;
3107 while(prevSuballocItem != m_Suballocations.cbegin())
3110 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
3111 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
3113 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
3115 bufferImageGranularityConflict =
true;
3123 if(bufferImageGranularityConflict)
3125 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
3130 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
3133 VmaSuballocationList::const_iterator next = freeSuballocItem;
3135 const VkDeviceSize requiredEndMargin =
3136 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
3139 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
3146 if(bufferImageGranularity > 1)
3148 VmaSuballocationList::const_iterator nextSuballocItem = freeSuballocItem;
3150 while(nextSuballocItem != m_Suballocations.cend())
3152 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
3153 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
3155 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
3173 bool VmaBlock::IsEmpty()
const 3175 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
3178 void VmaBlock::Alloc(
3179 const VmaAllocationRequest& request,
3180 VmaSuballocationType type,
3181 VkDeviceSize allocSize)
3183 VMA_ASSERT(request.freeSuballocationItem != m_Suballocations.end());
3184 VmaSuballocation& suballoc = *request.freeSuballocationItem;
3186 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
3188 VMA_ASSERT(request.offset >= suballoc.offset);
3189 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
3190 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
3191 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
3195 UnregisterFreeSuballocation(request.freeSuballocationItem);
3197 suballoc.offset = request.offset;
3198 suballoc.size = allocSize;
3199 suballoc.type = type;
3204 VmaSuballocation paddingSuballoc = {};
3205 paddingSuballoc.offset = request.offset + allocSize;
3206 paddingSuballoc.size = paddingEnd;
3207 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3208 VmaSuballocationList::iterator next = request.freeSuballocationItem;
3210 const VmaSuballocationList::iterator paddingEndItem =
3211 m_Suballocations.insert(next, paddingSuballoc);
3212 RegisterFreeSuballocation(paddingEndItem);
3218 VmaSuballocation paddingSuballoc = {};
3219 paddingSuballoc.offset = request.offset - paddingBegin;
3220 paddingSuballoc.size = paddingBegin;
3221 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3222 const VmaSuballocationList::iterator paddingBeginItem =
3223 m_Suballocations.insert(request.freeSuballocationItem, paddingSuballoc);
3224 RegisterFreeSuballocation(paddingBeginItem);
3228 m_FreeCount = m_FreeCount - 1;
3229 if(paddingBegin > 0)
3237 m_SumFreeSize -= allocSize;
3240 void VmaBlock::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
3243 VmaSuballocation& suballoc = *suballocItem;
3244 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
3248 m_SumFreeSize += suballoc.size;
3251 bool mergeWithNext =
false;
3252 bool mergeWithPrev =
false;
3254 VmaSuballocationList::iterator nextItem = suballocItem;
3256 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
3258 mergeWithNext =
true;
3261 VmaSuballocationList::iterator prevItem = suballocItem;
3262 if(suballocItem != m_Suballocations.begin())
3265 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
3267 mergeWithPrev =
true;
3273 UnregisterFreeSuballocation(nextItem);
3274 MergeFreeWithNext(suballocItem);
3279 UnregisterFreeSuballocation(prevItem);
3280 MergeFreeWithNext(prevItem);
3281 RegisterFreeSuballocation(prevItem);
3284 RegisterFreeSuballocation(suballocItem);
3287 void VmaBlock::Free(
const VmaAllocation allocation)
3289 const VkDeviceSize allocationOffset = allocation->GetOffset();
3290 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
3291 suballocItem != m_Suballocations.end();
3294 VmaSuballocation& suballoc = *suballocItem;
3295 if(suballoc.offset == allocationOffset)
3297 FreeSuballocation(suballocItem);
3298 VMA_HEAVY_ASSERT(Validate());
3302 VMA_ASSERT(0 &&
"Not found!");
3305 #if VMA_STATS_STRING_ENABLED 3307 void VmaBlock::PrintDetailedMap(
class VmaStringBuilder& sb)
const 3309 sb.Add(
"{\n\t\t\t\"Bytes\": ");
3310 sb.AddNumber(m_Size);
3311 sb.Add(
",\n\t\t\t\"FreeBytes\": ");
3312 sb.AddNumber(m_SumFreeSize);
3313 sb.Add(
",\n\t\t\t\"Suballocations\": ");
3314 sb.AddNumber(m_Suballocations.size());
3315 sb.Add(
",\n\t\t\t\"FreeSuballocations\": ");
3316 sb.AddNumber(m_FreeCount);
3317 sb.Add(
",\n\t\t\t\"SuballocationList\": [");
3320 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
3321 suballocItem != m_Suballocations.cend();
3322 ++suballocItem, ++i)
3326 sb.Add(
",\n\t\t\t\t{ \"Type\": ");
3330 sb.Add(
"\n\t\t\t\t{ \"Type\": ");
3332 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
3333 sb.Add(
", \"Size\": ");
3334 sb.AddNumber(suballocItem->size);
3335 sb.Add(
", \"Offset\": ");
3336 sb.AddNumber(suballocItem->offset);
3340 sb.Add(
"\n\t\t\t]\n\t\t}");
3343 #endif // #if VMA_STATS_STRING_ENABLED 3345 void VmaBlock::MergeFreeWithNext(VmaSuballocationList::iterator item)
3347 VMA_ASSERT(item != m_Suballocations.end());
3348 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3350 VmaSuballocationList::iterator nextItem = item;
3352 VMA_ASSERT(nextItem != m_Suballocations.end());
3353 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
3355 item->size += nextItem->size;
3357 m_Suballocations.erase(nextItem);
3360 void VmaBlock::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
3362 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3363 VMA_ASSERT(item->size > 0);
3365 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
3367 if(m_FreeSuballocationsBySize.empty())
3369 m_FreeSuballocationsBySize.push_back(item);
3373 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3374 m_FreeSuballocationsBySize.data(),
3375 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
3377 VmaSuballocationItemSizeLess());
3378 size_t index = it - m_FreeSuballocationsBySize.data();
3379 VectorInsert(m_FreeSuballocationsBySize, index, item);
3384 void VmaBlock::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
3386 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
3387 VMA_ASSERT(item->size > 0);
3389 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
3391 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
3392 m_FreeSuballocationsBySize.data(),
3393 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
3395 VmaSuballocationItemSizeLess());
3396 for(
size_t index = it - m_FreeSuballocationsBySize.data();
3397 index < m_FreeSuballocationsBySize.size();
3400 if(m_FreeSuballocationsBySize[index] == item)
3402 VectorRemove(m_FreeSuballocationsBySize, index);
3405 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
3407 VMA_ASSERT(0 &&
"Not found.");
3413 memset(&outInfo, 0,
sizeof(outInfo));
3418 static void CalcAllocationStatInfo(
VmaStatInfo& outInfo,
const VmaBlock& alloc)
3422 const uint32_t rangeCount = (uint32_t)alloc.m_Suballocations.size();
3434 for(VmaSuballocationList::const_iterator suballocItem = alloc.m_Suballocations.cbegin();
3435 suballocItem != alloc.m_Suballocations.cend();
3438 const VmaSuballocation& suballoc = *suballocItem;
3439 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
3466 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
3474 VmaBlockVector::VmaBlockVector(VmaAllocator hAllocator) :
3475 m_hAllocator(hAllocator),
3476 m_Blocks(VmaStlAllocator<VmaBlock*>(hAllocator->GetAllocationCallbacks()))
3480 VmaBlockVector::~VmaBlockVector()
3482 for(
size_t i = m_Blocks.size(); i--; )
3484 m_Blocks[i]->Destroy(m_hAllocator);
3485 vma_delete(m_hAllocator, m_Blocks[i]);
3489 void VmaBlockVector::Remove(VmaBlock* pBlock)
3491 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
3493 if(m_Blocks[blockIndex] == pBlock)
3495 VectorRemove(m_Blocks, blockIndex);
3502 void VmaBlockVector::IncrementallySortBlocks()
3505 for(
size_t i = 1; i < m_Blocks.size(); ++i)
3507 if(m_Blocks[i - 1]->m_SumFreeSize > m_Blocks[i]->m_SumFreeSize)
3509 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
3515 #if VMA_STATS_STRING_ENABLED 3517 void VmaBlockVector::PrintDetailedMap(
class VmaStringBuilder& sb)
const 3519 for(
size_t i = 0; i < m_Blocks.size(); ++i)
3529 m_Blocks[i]->PrintDetailedMap(sb);
3533 #endif // #if VMA_STATS_STRING_ENABLED 3535 void VmaBlockVector::UnmapPersistentlyMappedMemory()
3537 for(
size_t i = m_Blocks.size(); i--; )
3539 VmaBlock* pBlock = m_Blocks[i];
3540 if(pBlock->m_pMappedData != VMA_NULL)
3542 VMA_ASSERT(pBlock->m_PersistentMap !=
false);
3543 vkUnmapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory);
3544 pBlock->m_pMappedData = VMA_NULL;
3549 VkResult VmaBlockVector::MapPersistentlyMappedMemory()
3551 VkResult finalResult = VK_SUCCESS;
3552 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
3554 VmaBlock* pBlock = m_Blocks[i];
3555 if(pBlock->m_PersistentMap)
3557 VMA_ASSERT(pBlock->m_pMappedData ==
nullptr);
3558 VkResult localResult = vkMapMemory(m_hAllocator->m_hDevice, pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &pBlock->m_pMappedData);
3559 if(localResult != VK_SUCCESS)
3561 finalResult = localResult;
3568 void VmaBlockVector::AddStats(
VmaStats* pStats, uint32_t memTypeIndex, uint32_t memHeapIndex)
const 3570 for(uint32_t allocIndex = 0; allocIndex < m_Blocks.size(); ++allocIndex)
3572 const VmaBlock*
const pBlock = m_Blocks[allocIndex];
3574 VMA_HEAVY_ASSERT(pBlock->Validate());
3576 CalcAllocationStatInfo(allocationStatInfo, *pBlock);
3577 VmaAddStatInfo(pStats->
total, allocationStatInfo);
3578 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
3579 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
3586 class VmaDefragmentator
3589 const VkAllocationCallbacks* m_pAllocationCallbacks;
3590 VkDeviceSize m_BufferImageGranularity;
3591 uint32_t m_MemTypeIndex;
3592 VMA_BLOCK_VECTOR_TYPE m_BlockVectorType;
3593 VkDeviceSize m_BytesMoved;
3594 uint32_t m_AllocationsMoved;
3596 struct AllocationInfo
3598 VmaAllocation m_hAllocation;
3599 VkBool32* m_pChanged;
3602 m_hAllocation(VK_NULL_HANDLE),
3603 m_pChanged(VMA_NULL)
3608 struct AllocationInfoSizeGreater
3610 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3612 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3617 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3622 bool m_HasNonMovableAllocations;
3623 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3625 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3627 m_HasNonMovableAllocations(true),
3628 m_Allocations(pAllocationCallbacks),
3629 m_pMappedDataForDefragmentation(VMA_NULL)
3633 void CalcHasNonMovableAllocations()
3635 const size_t blockAllocCount =
3636 m_pBlock->m_Suballocations.size() - m_pBlock->m_FreeCount;
3637 const size_t defragmentAllocCount = m_Allocations.size();
3638 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3641 void SortAllocationsBySizeDescecnding()
3643 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3646 VkResult EnsureMapping(VkDevice hDevice,
void** ppMappedData)
3649 if(m_pMappedDataForDefragmentation)
3651 *ppMappedData = m_pMappedDataForDefragmentation;
3656 if(m_pBlock->m_PersistentMap)
3658 VMA_ASSERT(m_pBlock->m_pMappedData != VMA_NULL);
3659 *ppMappedData = m_pBlock->m_pMappedData;
3664 VkResult res = vkMapMemory(hDevice, m_pBlock->m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_pMappedDataForDefragmentation);
3665 *ppMappedData = m_pMappedDataForDefragmentation;
3669 void Unmap(VkDevice hDevice)
3671 if(m_pMappedDataForDefragmentation != VMA_NULL)
3673 vkUnmapMemory(hDevice, m_pBlock->m_hMemory);
3679 void* m_pMappedDataForDefragmentation;
3682 struct BlockPointerLess
3684 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaBlock* pRhsBlock)
const 3686 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3688 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3690 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3696 struct BlockInfoCompareMoveDestination
3698 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3700 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3704 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3708 if(pLhsBlockInfo->m_pBlock->m_SumFreeSize < pRhsBlockInfo->m_pBlock->m_SumFreeSize)
3716 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3717 BlockInfoVector m_Blocks;
3719 VkResult DefragmentRound(
3720 VkDeviceSize maxBytesToMove,
3721 uint32_t maxAllocationsToMove);
3723 static bool MoveMakesSense(
3724 size_t dstBlockIndex, VkDeviceSize dstOffset,
3725 size_t srcBlockIndex, VkDeviceSize srcOffset);
3730 const VkAllocationCallbacks* pAllocationCallbacks,
3731 VkDeviceSize bufferImageGranularity,
3732 uint32_t memTypeIndex,
3733 VMA_BLOCK_VECTOR_TYPE blockVectorType);
3735 ~VmaDefragmentator();
3737 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3738 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3740 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3742 VkResult Defragment(
3743 VmaBlockVector* pBlockVector,
3744 VkDeviceSize maxBytesToMove,
3745 uint32_t maxAllocationsToMove);
3748 VmaDefragmentator::VmaDefragmentator(
3750 const VkAllocationCallbacks* pAllocationCallbacks,
3751 VkDeviceSize bufferImageGranularity,
3752 uint32_t memTypeIndex,
3753 VMA_BLOCK_VECTOR_TYPE blockVectorType) :
3755 m_pAllocationCallbacks(pAllocationCallbacks),
3756 m_BufferImageGranularity(bufferImageGranularity),
3757 m_MemTypeIndex(memTypeIndex),
3758 m_BlockVectorType(blockVectorType),
3760 m_AllocationsMoved(0),
3761 m_Allocations(VmaStlAllocator<AllocationInfo>(pAllocationCallbacks)),
3762 m_Blocks(VmaStlAllocator<BlockInfo*>(pAllocationCallbacks))
3766 VmaDefragmentator::~VmaDefragmentator()
3768 for(
size_t i = m_Blocks.size(); i--; )
3770 vma_delete(m_pAllocationCallbacks, m_Blocks[i]);
3774 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
3776 AllocationInfo allocInfo;
3777 allocInfo.m_hAllocation = hAlloc;
3778 allocInfo.m_pChanged = pChanged;
3779 m_Allocations.push_back(allocInfo);
3782 VkResult VmaDefragmentator::DefragmentRound(
3783 VkDeviceSize maxBytesToMove,
3784 uint32_t maxAllocationsToMove)
3786 if(m_Blocks.empty())
3791 size_t srcBlockIndex = m_Blocks.size() - 1;
3792 size_t srcAllocIndex = SIZE_MAX;
3798 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
3800 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
3803 if(srcBlockIndex == 0)
3810 srcAllocIndex = SIZE_MAX;
3815 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
3819 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
3820 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
3822 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
3823 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
3824 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
3825 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
3828 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
3830 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
3831 VmaAllocationRequest dstAllocRequest;
3832 if(pDstBlockInfo->m_pBlock->CreateAllocationRequest(
3833 m_BufferImageGranularity,
3837 &dstAllocRequest) &&
3839 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
3842 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
3843 (m_BytesMoved + size > maxBytesToMove))
3845 return VK_INCOMPLETE;
3848 void* pDstMappedData = VMA_NULL;
3849 VkResult res = pDstBlockInfo->EnsureMapping(m_hDevice, &pDstMappedData);
3850 if(res != VK_SUCCESS)
3855 void* pSrcMappedData = VMA_NULL;
3856 res = pSrcBlockInfo->EnsureMapping(m_hDevice, &pSrcMappedData);
3857 if(res != VK_SUCCESS)
3864 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
3865 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
3868 pDstBlockInfo->m_pBlock->Alloc(dstAllocRequest, suballocType, size);
3869 pSrcBlockInfo->m_pBlock->Free(allocInfo.m_hAllocation);
3871 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
3873 if(allocInfo.m_pChanged != VMA_NULL)
3875 *allocInfo.m_pChanged = VK_TRUE;
3878 ++m_AllocationsMoved;
3879 m_BytesMoved += size;
3881 VectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
3889 if(srcAllocIndex > 0)
3895 if(srcBlockIndex > 0)
3898 srcAllocIndex = SIZE_MAX;
3908 VkResult VmaDefragmentator::Defragment(
3909 VmaBlockVector* pBlockVector,
3910 VkDeviceSize maxBytesToMove,
3911 uint32_t maxAllocationsToMove)
3913 if(m_Allocations.empty())
3919 const size_t blockCount = pBlockVector->m_Blocks.size();
3920 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3922 BlockInfo* pBlockInfo = vma_new(m_pAllocationCallbacks, BlockInfo)(m_pAllocationCallbacks);
3923 pBlockInfo->m_pBlock = pBlockVector->m_Blocks[blockIndex];
3924 m_Blocks.push_back(pBlockInfo);
3928 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
3931 for(
size_t allocIndex = 0, allocCount = m_Allocations.size(); allocIndex < allocCount; ++allocIndex)
3933 AllocationInfo& allocInfo = m_Allocations[allocIndex];
3934 VmaBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
3935 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
3936 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
3938 (*it)->m_Allocations.push_back(allocInfo);
3945 m_Allocations.clear();
3947 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3949 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
3950 pBlockInfo->CalcHasNonMovableAllocations();
3951 pBlockInfo->SortAllocationsBySizeDescecnding();
3955 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
3958 VkResult result = VK_SUCCESS;
3959 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
3961 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
3965 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
3967 m_Blocks[blockIndex]->Unmap(m_hDevice);
3973 bool VmaDefragmentator::MoveMakesSense(
3974 size_t dstBlockIndex, VkDeviceSize dstOffset,
3975 size_t srcBlockIndex, VkDeviceSize srcOffset)
3977 if(dstBlockIndex < srcBlockIndex)
3981 if(dstBlockIndex > srcBlockIndex)
3985 if(dstOffset < srcOffset)
3997 m_PhysicalDevice(pCreateInfo->physicalDevice),
3998 m_hDevice(pCreateInfo->device),
3999 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
4000 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
4001 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
4002 m_PreferredLargeHeapBlockSize(0),
4003 m_PreferredSmallHeapBlockSize(0),
4004 m_UnmapPersistentlyMappedMemoryCounter(0)
4008 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
4009 memset(&m_MemProps, 0,
sizeof(m_MemProps));
4010 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
4012 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
4013 memset(&m_HasEmptyBlock, 0,
sizeof(m_HasEmptyBlock));
4014 memset(&m_pOwnAllocations, 0,
sizeof(m_pOwnAllocations));
4027 vkGetPhysicalDeviceProperties(m_PhysicalDevice, &m_PhysicalDeviceProperties);
4028 vkGetPhysicalDeviceMemoryProperties(m_PhysicalDevice, &m_MemProps);
4030 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
4032 for(
size_t j = 0; j < VMA_BLOCK_VECTOR_TYPE_COUNT; ++j)
4034 m_pBlockVectors[i][j] = vma_new(
this, VmaBlockVector)(
this);
4035 m_pOwnAllocations[i][j] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
4040 VmaAllocator_T::~VmaAllocator_T()
4042 for(
size_t i = GetMemoryTypeCount(); i--; )
4044 for(
size_t j = VMA_BLOCK_VECTOR_TYPE_COUNT; j--; )
4046 vma_delete(
this, m_pOwnAllocations[i][j]);
4047 vma_delete(
this, m_pBlockVectors[i][j]);
4052 VkDeviceSize VmaAllocator_T::GetPreferredBlockSize(uint32_t memTypeIndex)
const 4054 VkDeviceSize heapSize = m_MemProps.memoryHeaps[m_MemProps.memoryTypes[memTypeIndex].heapIndex].size;
4055 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
4056 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
4059 VkResult VmaAllocator_T::AllocateMemoryOfType(
4060 const VkMemoryRequirements& vkMemReq,
4062 uint32_t memTypeIndex,
4063 VmaSuballocationType suballocType,
4064 VmaAllocation* pAllocation)
4066 VMA_ASSERT(pAllocation != VMA_NULL);
4067 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
4069 const VkDeviceSize preferredBlockSize = GetPreferredBlockSize(memTypeIndex);
4071 const bool ownMemory =
4073 VMA_DEBUG_ALWAYS_OWN_MEMORY ||
4075 vkMemReq.size > preferredBlockSize / 2);
4081 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4085 return AllocateOwnMemory(
4096 uint32_t blockVectorType = VmaMemoryRequirementFlagsToBlockVectorType(vmaMemReq.
flags);
4098 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4099 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4100 VMA_ASSERT(blockVector);
4104 for(
size_t allocIndex = 0; allocIndex < blockVector->m_Blocks.size(); ++allocIndex )
4106 VmaBlock*
const pBlock = blockVector->m_Blocks[allocIndex];
4108 VmaAllocationRequest allocRequest = {};
4110 if(pBlock->CreateAllocationRequest(
4111 GetBufferImageGranularity(),
4118 if(pBlock->IsEmpty())
4120 m_HasEmptyBlock[memTypeIndex] =
false;
4123 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size);
4124 *pAllocation = vma_new(
this, VmaAllocation_T)();
4125 (*pAllocation)->InitBlockAllocation(
4127 allocRequest.offset,
4132 VMA_HEAVY_ASSERT(pBlock->Validate());
4133 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)allocIndex);
4141 VMA_DEBUG_LOG(
" FAILED due to VMA_MEMORY_REQUIREMENT_NEVER_ALLOCATE_BIT");
4142 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4147 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
4148 allocInfo.memoryTypeIndex = memTypeIndex;
4149 allocInfo.allocationSize = preferredBlockSize;
4150 VkDeviceMemory mem = VK_NULL_HANDLE;
4151 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4155 allocInfo.allocationSize /= 2;
4156 if(allocInfo.allocationSize >= vkMemReq.size)
4158 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4162 allocInfo.allocationSize /= 2;
4163 if(allocInfo.allocationSize >= vkMemReq.size)
4165 res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &mem);
4173 res = AllocateOwnMemory(
4180 if(res == VK_SUCCESS)
4183 VMA_DEBUG_LOG(
" Allocated as OwnMemory");
4189 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
4197 void* pMappedData = VMA_NULL;
4199 if(persistentMap && m_UnmapPersistentlyMappedMemoryCounter == 0)
4201 res = vkMapMemory(m_hDevice, mem, 0, VK_WHOLE_SIZE, 0, &pMappedData);
4204 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
4205 vkFreeMemory(m_hDevice, mem, GetAllocationCallbacks());
4211 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
4213 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, memTypeIndex, mem, allocInfo.allocationSize);
4217 VmaBlock*
const pBlock = vma_new(
this, VmaBlock)(
this);
4220 (VMA_BLOCK_VECTOR_TYPE)blockVectorType,
4222 allocInfo.allocationSize,
4226 blockVector->m_Blocks.push_back(pBlock);
4229 VmaAllocationRequest allocRequest = {};
4230 allocRequest.freeSuballocationItem = pBlock->m_Suballocations.begin();
4231 allocRequest.offset = 0;
4232 pBlock->Alloc(allocRequest, suballocType, vkMemReq.size);
4233 *pAllocation = vma_new(
this, VmaAllocation_T)();
4234 (*pAllocation)->InitBlockAllocation(
4236 allocRequest.offset,
4241 VMA_HEAVY_ASSERT(pBlock->Validate());
4242 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
4248 VkResult VmaAllocator_T::AllocateOwnMemory(
4250 VmaSuballocationType suballocType,
4251 uint32_t memTypeIndex,
4254 VmaAllocation* pAllocation)
4256 VMA_ASSERT(pAllocation);
4258 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
4259 allocInfo.memoryTypeIndex = memTypeIndex;
4260 allocInfo.allocationSize = size;
4263 VkDeviceMemory hMemory = VK_NULL_HANDLE;
4264 VkResult res = vkAllocateMemory(m_hDevice, &allocInfo, GetAllocationCallbacks(), &hMemory);
4267 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
4271 void* pMappedData =
nullptr;
4274 if(m_UnmapPersistentlyMappedMemoryCounter == 0)
4276 res = vkMapMemory(m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
4279 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
4280 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
4287 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
4289 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, memTypeIndex, hMemory, size);
4292 *pAllocation = vma_new(
this, VmaAllocation_T)();
4293 (*pAllocation)->InitOwnAllocation(memTypeIndex, hMemory, suballocType, map, pMappedData, size, pUserData);
4297 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4298 AllocationVectorType* pOwnAllocations = m_pOwnAllocations[memTypeIndex][map ? VMA_BLOCK_VECTOR_TYPE_MAPPED : VMA_BLOCK_VECTOR_TYPE_UNMAPPED];
4299 VMA_ASSERT(pOwnAllocations);
4300 VmaAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
4301 VmaAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
4302 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4306 VmaPointerLess()) - pOwnAllocationsBeg;
4307 VectorInsert(*pOwnAllocations, indexToInsert, *pAllocation);
4310 VMA_DEBUG_LOG(
" Allocated OwnMemory MemoryTypeIndex=#%u", memTypeIndex);
4315 VkResult VmaAllocator_T::AllocateMemory(
4316 const VkMemoryRequirements& vkMemReq,
4318 VmaSuballocationType suballocType,
4319 VmaAllocation* pAllocation)
4324 VMA_ASSERT(0 &&
"Specifying VMA_MEMORY_REQUIREMENT_OWN_MEMORY_BIT together with VMA_MEMORY_REQUIREMENT_NEVER_ALLOCATE_BIT makes no sense.");
4325 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4329 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
4330 uint32_t memTypeIndex = UINT32_MAX;
4332 if(res == VK_SUCCESS)
4334 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pAllocation);
4336 if(res == VK_SUCCESS)
4346 memoryTypeBits &= ~(1u << memTypeIndex);
4349 if(res == VK_SUCCESS)
4351 res = AllocateMemoryOfType(vkMemReq, vmaMemReq, memTypeIndex, suballocType, pAllocation);
4353 if(res == VK_SUCCESS)
4363 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
4373 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
4375 VMA_ASSERT(allocation);
4377 if(allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK)
4379 VmaBlock* pBlockToDelete = VMA_NULL;
4381 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
4382 const VMA_BLOCK_VECTOR_TYPE blockVectorType = allocation->GetBlockVectorType();
4384 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4386 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4387 VmaBlock* pBlock = allocation->GetBlock();
4389 pBlock->Free(allocation);
4390 VMA_HEAVY_ASSERT(pBlock->Validate());
4392 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
4395 if(pBlock->IsEmpty())
4398 if(m_HasEmptyBlock[memTypeIndex])
4400 pBlockToDelete = pBlock;
4401 pBlockVector->Remove(pBlock);
4406 m_HasEmptyBlock[memTypeIndex] =
true;
4410 pBlockVector->IncrementallySortBlocks();
4414 if(pBlockToDelete != VMA_NULL)
4416 VMA_DEBUG_LOG(
" Deleted empty allocation");
4417 pBlockToDelete->Destroy(
this);
4418 vma_delete(
this, pBlockToDelete);
4421 vma_delete(
this, allocation);
4425 FreeOwnMemory(allocation);
4429 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
4431 InitStatInfo(pStats->
total);
4432 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
4434 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
4437 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4439 VmaMutexLock allocationsLock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4440 const uint32_t heapIndex = m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4441 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4443 const VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4444 VMA_ASSERT(pBlockVector);
4445 pBlockVector->AddStats(pStats, memTypeIndex, heapIndex);
4449 VmaPostprocessCalcStatInfo(pStats->
total);
4450 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
4451 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
4452 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
4453 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
4456 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
4458 void VmaAllocator_T::UnmapPersistentlyMappedMemory()
4460 if(m_UnmapPersistentlyMappedMemoryCounter++ == 0)
4462 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
4464 for(
size_t memTypeIndex = m_MemProps.memoryTypeCount; memTypeIndex--; )
4466 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
4467 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
4468 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4472 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4473 AllocationVectorType* pOwnAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4474 for(
size_t ownAllocIndex = pOwnAllocationsVector->size(); ownAllocIndex--; )
4476 VmaAllocation hAlloc = (*pOwnAllocationsVector)[ownAllocIndex];
4477 hAlloc->OwnAllocUnmapPersistentlyMappedMemory(m_hDevice);
4483 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4484 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4485 pBlockVector->UnmapPersistentlyMappedMemory();
4493 VkResult VmaAllocator_T::MapPersistentlyMappedMemory()
4495 VMA_ASSERT(m_UnmapPersistentlyMappedMemoryCounter > 0);
4496 if(--m_UnmapPersistentlyMappedMemoryCounter == 0)
4498 VkResult finalResult = VK_SUCCESS;
4499 if(m_PhysicalDeviceProperties.vendorID == VMA_VENDOR_ID_AMD)
4501 for(
size_t memTypeIndex = 0; memTypeIndex < m_MemProps.memoryTypeCount; ++memTypeIndex)
4503 const VkMemoryPropertyFlags memFlags = m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
4504 if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0 &&
4505 (memFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4509 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4510 AllocationVectorType* pAllocationsVector = m_pOwnAllocations[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4511 for(
size_t ownAllocIndex = 0, ownAllocCount = pAllocationsVector->size(); ownAllocIndex < ownAllocCount; ++ownAllocIndex)
4513 VmaAllocation hAlloc = (*pAllocationsVector)[ownAllocIndex];
4514 hAlloc->OwnAllocMapPersistentlyMappedMemory(m_hDevice);
4520 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4521 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][VMA_BLOCK_VECTOR_TYPE_MAPPED];
4522 VkResult localResult = pBlockVector->MapPersistentlyMappedMemory();
4523 if(localResult != VK_SUCCESS)
4525 finalResult = localResult;
4537 VkResult VmaAllocator_T::Defragment(
4538 VmaAllocation* pAllocations,
4539 size_t allocationCount,
4540 VkBool32* pAllocationsChanged,
4544 if(pAllocationsChanged != VMA_NULL)
4546 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
4548 if(pDefragmentationStats != VMA_NULL)
4550 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
4553 if(m_UnmapPersistentlyMappedMemoryCounter > 0)
4555 VMA_DEBUG_LOG(
"ERROR: Cannot defragment when inside vmaUnmapPersistentlyMappedMemory.");
4556 return VK_ERROR_MEMORY_MAP_FAILED;
4560 const VkDeviceSize bufferImageGranularity = GetBufferImageGranularity();
4561 VmaDefragmentator* pDefragmentators[VK_MAX_MEMORY_TYPES][VMA_BLOCK_VECTOR_TYPE_COUNT];
4562 memset(pDefragmentators, 0,
sizeof(pDefragmentators));
4563 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4566 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4568 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4570 pDefragmentators[memTypeIndex][blockVectorType] = vma_new(
this, VmaDefragmentator)(
4572 GetAllocationCallbacks(),
4573 bufferImageGranularity,
4575 (VMA_BLOCK_VECTOR_TYPE)blockVectorType);
4581 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
4583 VmaAllocation hAlloc = pAllocations[allocIndex];
4585 if(hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK)
4587 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
4589 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4591 const VMA_BLOCK_VECTOR_TYPE blockVectorType = hAlloc->GetBlockVectorType();
4592 VkBool32* pChanged = (pAllocationsChanged != VMA_NULL) ?
4593 &pAllocationsChanged[allocIndex] : VMA_NULL;
4594 pDefragmentators[memTypeIndex][blockVectorType]->AddAllocation(hAlloc, pChanged);
4601 VkResult result = VK_SUCCESS;
4604 VkDeviceSize maxBytesToMove = SIZE_MAX;
4605 uint32_t maxAllocationsToMove = UINT32_MAX;
4606 if(pDefragmentationInfo != VMA_NULL)
4611 for(uint32_t memTypeIndex = 0;
4612 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
4616 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4618 VmaMutexLock lock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4620 for(uint32_t blockVectorType = 0;
4621 (blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT) && (result == VK_SUCCESS);
4624 VmaBlockVector* pBlockVector = m_pBlockVectors[memTypeIndex][blockVectorType];
4627 result = pDefragmentators[memTypeIndex][blockVectorType]->Defragment(pBlockVector, maxBytesToMove, maxAllocationsToMove);
4630 if(pDefragmentationStats != VMA_NULL)
4632 const VkDeviceSize
bytesMoved = pDefragmentators[memTypeIndex][blockVectorType]->GetBytesMoved();
4633 const uint32_t
allocationsMoved = pDefragmentators[memTypeIndex][blockVectorType]->GetAllocationsMoved();
4636 VMA_ASSERT(bytesMoved <= maxBytesToMove);
4637 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
4643 for(
size_t blockIndex = pBlockVector->m_Blocks.size(); blockIndex--; )
4645 VmaBlock* pBlock = pBlockVector->m_Blocks[blockIndex];
4646 if(pBlock->IsEmpty())
4648 if(pDefragmentationStats != VMA_NULL)
4651 pDefragmentationStats->
bytesFreed += pBlock->m_Size;
4654 VectorRemove(pBlockVector->m_Blocks, blockIndex);
4655 pBlock->Destroy(
this);
4656 vma_delete(
this, pBlock);
4661 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_COUNT - 1)
4663 m_HasEmptyBlock[memTypeIndex] =
false;
4670 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
4672 for(
size_t blockVectorType = VMA_BLOCK_VECTOR_TYPE_COUNT; blockVectorType--; )
4674 vma_delete(
this, pDefragmentators[memTypeIndex][blockVectorType]);
4681 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
4683 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
4684 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
4685 pAllocationInfo->
offset = hAllocation->GetOffset();
4686 pAllocationInfo->
size = hAllocation->GetSize();
4687 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
4688 pAllocationInfo->
pUserData = hAllocation->GetUserData();
4691 void VmaAllocator_T::FreeOwnMemory(VmaAllocation allocation)
4693 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_OWN);
4695 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
4697 VmaMutexLock lock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4698 AllocationVectorType*
const pOwnAllocations = m_pOwnAllocations[memTypeIndex][allocation->GetBlockVectorType()];
4699 VMA_ASSERT(pOwnAllocations);
4700 VmaAllocation*
const pOwnAllocationsBeg = pOwnAllocations->data();
4701 VmaAllocation*
const pOwnAllocationsEnd = pOwnAllocationsBeg + pOwnAllocations->size();
4702 VmaAllocation*
const pOwnAllocationIt = VmaBinaryFindFirstNotLess(
4707 if(pOwnAllocationIt != pOwnAllocationsEnd)
4709 const size_t ownAllocationIndex = pOwnAllocationIt - pOwnAllocationsBeg;
4710 VectorRemove(*pOwnAllocations, ownAllocationIndex);
4718 VkDeviceMemory hMemory = allocation->GetMemory();
4721 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
4723 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memTypeIndex, hMemory, allocation->GetSize());
4726 if(allocation->GetMappedData() != VMA_NULL)
4728 vkUnmapMemory(m_hDevice, hMemory);
4731 vkFreeMemory(m_hDevice, hMemory, GetAllocationCallbacks());
4733 VMA_DEBUG_LOG(
" Freed OwnMemory MemoryTypeIndex=%u", memTypeIndex);
4735 vma_delete(
this, allocation);
4738 #if VMA_STATS_STRING_ENABLED 4740 void VmaAllocator_T::PrintDetailedMap(VmaStringBuilder& sb)
4742 bool ownAllocationsStarted =
false;
4743 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4745 VmaMutexLock ownAllocationsLock(m_OwnAllocationsMutex[memTypeIndex], m_UseMutex);
4746 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4748 AllocationVectorType*
const pOwnAllocVector = m_pOwnAllocations[memTypeIndex][blockVectorType];
4749 VMA_ASSERT(pOwnAllocVector);
4750 if(pOwnAllocVector->empty() ==
false)
4752 if(ownAllocationsStarted)
4754 sb.Add(
",\n\t\"Type ");
4758 sb.Add(
",\n\"OwnAllocations\": {\n\t\"Type ");
4759 ownAllocationsStarted =
true;
4761 sb.AddNumber(memTypeIndex);
4762 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
4768 for(
size_t i = 0; i < pOwnAllocVector->size(); ++i)
4770 const VmaAllocation hAlloc = (*pOwnAllocVector)[i];
4773 sb.Add(
",\n\t\t{ \"Size\": ");
4777 sb.Add(
"\n\t\t{ \"Size\": ");
4779 sb.AddNumber(hAlloc->GetSize());
4780 sb.Add(
", \"Type\": ");
4781 sb.AddString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
4789 if(ownAllocationsStarted)
4795 bool allocationsStarted =
false;
4796 for(
size_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
4798 VmaMutexLock globalAllocationsLock(m_BlocksMutex[memTypeIndex], m_UseMutex);
4799 for(uint32_t blockVectorType = 0; blockVectorType < VMA_BLOCK_VECTOR_TYPE_COUNT; ++blockVectorType)
4801 if(m_pBlockVectors[memTypeIndex][blockVectorType]->IsEmpty() ==
false)
4803 if(allocationsStarted)
4805 sb.Add(
",\n\t\"Type ");
4809 sb.Add(
",\n\"Allocations\": {\n\t\"Type ");
4810 allocationsStarted =
true;
4812 sb.AddNumber(memTypeIndex);
4813 if(blockVectorType == VMA_BLOCK_VECTOR_TYPE_MAPPED)
4819 m_pBlockVectors[memTypeIndex][blockVectorType]->PrintDetailedMap(sb);
4825 if(allocationsStarted)
4832 #endif // #if VMA_STATS_STRING_ENABLED 4834 static VkResult AllocateMemoryForImage(
4835 VmaAllocator allocator,
4838 VmaSuballocationType suballocType,
4839 VmaAllocation* pAllocation)
4841 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pMemoryRequirements && pAllocation);
4843 VkMemoryRequirements vkMemReq = {};
4844 vkGetImageMemoryRequirements(allocator->m_hDevice, image, &vkMemReq);
4846 return allocator->AllocateMemory(
4848 *pMemoryRequirements,
4858 VmaAllocator* pAllocator)
4860 VMA_ASSERT(pCreateInfo && pAllocator);
4861 VMA_DEBUG_LOG(
"vmaCreateAllocator");
4867 VmaAllocator allocator)
4869 if(allocator != VK_NULL_HANDLE)
4871 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
4872 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
4873 vma_delete(&allocationCallbacks, allocator);
4878 VmaAllocator allocator,
4879 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
4881 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
4882 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
4886 VmaAllocator allocator,
4887 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
4889 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
4890 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
4894 VmaAllocator allocator,
4895 uint32_t memoryTypeIndex,
4896 VkMemoryPropertyFlags* pFlags)
4898 VMA_ASSERT(allocator && pFlags);
4899 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
4900 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
4904 VmaAllocator allocator,
4907 VMA_ASSERT(allocator && pStats);
4908 VMA_DEBUG_GLOBAL_MUTEX_LOCK
4909 allocator->CalculateStats(pStats);
4912 #if VMA_STATS_STRING_ENABLED 4915 VmaAllocator allocator,
4916 char** ppStatsString,
4917 VkBool32 detailedMap)
4919 VMA_ASSERT(allocator && ppStatsString);
4920 VMA_DEBUG_GLOBAL_MUTEX_LOCK
4922 VmaStringBuilder sb(allocator);
4925 allocator->CalculateStats(&stats);
4927 sb.Add(
"{\n\"Total\": ");
4928 VmaPrintStatInfo(sb, stats.
total);
4930 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
4932 sb.Add(
",\n\"Heap ");
4933 sb.AddNumber(heapIndex);
4934 sb.Add(
"\": {\n\t\"Size\": ");
4935 sb.AddNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
4936 sb.Add(
",\n\t\"Flags\": ");
4937 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
4939 sb.AddString(
"DEVICE_LOCAL");
4947 sb.Add(
",\n\t\"Stats:\": ");
4948 VmaPrintStatInfo(sb, stats.
memoryHeap[heapIndex]);
4951 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
4953 if(allocator->m_MemProps.memoryTypes[typeIndex].heapIndex == heapIndex)
4955 sb.Add(
",\n\t\"Type ");
4956 sb.AddNumber(typeIndex);
4957 sb.Add(
"\": {\n\t\t\"Flags\": \"");
4958 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
4959 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
4961 sb.Add(
" DEVICE_LOCAL");
4963 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
4965 sb.Add(
" HOST_VISIBLE");
4967 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
4969 sb.Add(
" HOST_COHERENT");
4971 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
4973 sb.Add(
" HOST_CACHED");
4975 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
4977 sb.Add(
" LAZILY_ALLOCATED");
4982 sb.Add(
",\n\t\t\"Stats\": ");
4983 VmaPrintStatInfo(sb, stats.
memoryType[typeIndex]);
4990 if(detailedMap == VK_TRUE)
4992 allocator->PrintDetailedMap(sb);
4997 const size_t len = sb.GetLength();
4998 char*
const pChars = vma_new_array(allocator,
char, len + 1);
5001 memcpy(pChars, sb.GetData(), len);
5004 *ppStatsString = pChars;
5008 VmaAllocator allocator,
5011 if(pStatsString != VMA_NULL)
5013 VMA_ASSERT(allocator);
5014 size_t len = strlen(pStatsString);
5015 vma_delete_array(allocator, pStatsString, len + 1);
5019 #endif // #if VMA_STATS_STRING_ENABLED 5024 VmaAllocator allocator,
5025 uint32_t memoryTypeBits,
5027 uint32_t* pMemoryTypeIndex)
5029 VMA_ASSERT(allocator != VK_NULL_HANDLE);
5030 VMA_ASSERT(pMemoryRequirements != VMA_NULL);
5031 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
5033 uint32_t requiredFlags = pMemoryRequirements->
requiredFlags;
5035 if(preferredFlags == 0)
5037 preferredFlags = requiredFlags;
5040 VMA_ASSERT((requiredFlags & ~preferredFlags) == 0);
5043 switch(pMemoryRequirements->
usage)
5048 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5051 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
5054 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5055 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
5058 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5059 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
5067 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5070 *pMemoryTypeIndex = UINT32_MAX;
5071 uint32_t minCost = UINT32_MAX;
5072 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
5073 memTypeIndex < allocator->GetMemoryTypeCount();
5074 ++memTypeIndex, memTypeBit <<= 1)
5077 if((memTypeBit & memoryTypeBits) != 0)
5079 const VkMemoryPropertyFlags currFlags =
5080 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
5082 if((requiredFlags & ~currFlags) == 0)
5085 uint32_t currCost = CountBitsSet(preferredFlags & ~currFlags);
5087 if(currCost < minCost)
5089 *pMemoryTypeIndex = memTypeIndex;
5099 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
5103 VmaAllocator allocator,
5104 const VkMemoryRequirements* pVkMemoryRequirements,
5106 VmaAllocation* pAllocation,
5109 VMA_ASSERT(allocator && pVkMemoryRequirements && pVmaMemoryRequirements && pAllocation);
5111 VMA_DEBUG_LOG(
"vmaAllocateMemory");
5113 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5115 return allocator->AllocateMemory(
5116 *pVkMemoryRequirements,
5117 *pVmaMemoryRequirements,
5118 VMA_SUBALLOCATION_TYPE_UNKNOWN,
5123 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5128 VmaAllocator allocator,
5131 VmaAllocation* pAllocation,
5134 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pMemoryRequirements && pAllocation);
5136 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
5138 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5140 VkMemoryRequirements vkMemReq = {};
5141 vkGetBufferMemoryRequirements(allocator->m_hDevice, buffer, &vkMemReq);
5143 return allocator->AllocateMemory(
5145 *pMemoryRequirements,
5146 VMA_SUBALLOCATION_TYPE_BUFFER,
5151 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5156 VmaAllocator allocator,
5159 VmaAllocation* pAllocation,
5162 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pMemoryRequirements && pAllocation);
5164 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
5166 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5168 return AllocateMemoryForImage(
5171 pMemoryRequirements,
5172 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
5177 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5182 VmaAllocator allocator,
5183 VmaAllocation allocation)
5185 VMA_ASSERT(allocator && allocation);
5187 VMA_DEBUG_LOG(
"vmaFreeMemory");
5189 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5191 allocator->FreeMemory(allocation);
5195 VmaAllocator allocator,
5196 VmaAllocation allocation,
5199 VMA_ASSERT(allocator && allocation && pAllocationInfo);
5201 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5203 allocator->GetAllocationInfo(allocation, pAllocationInfo);
5207 VmaAllocator allocator,
5208 VmaAllocation allocation,
5211 VMA_ASSERT(allocator && allocation);
5213 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5215 allocation->SetUserData(pUserData);
5219 VmaAllocator allocator,
5220 VmaAllocation allocation,
5223 VMA_ASSERT(allocator && allocation && ppData);
5225 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5227 return vkMapMemory(allocator->m_hDevice, allocation->GetMemory(),
5228 allocation->GetOffset(), allocation->GetSize(), 0, ppData);
5232 VmaAllocator allocator,
5233 VmaAllocation allocation)
5235 VMA_ASSERT(allocator && allocation);
5237 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5239 vkUnmapMemory(allocator->m_hDevice, allocation->GetMemory());
5244 VMA_ASSERT(allocator);
5246 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5248 allocator->UnmapPersistentlyMappedMemory();
5253 VMA_ASSERT(allocator);
5255 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5257 return allocator->MapPersistentlyMappedMemory();
5261 VmaAllocator allocator,
5262 VmaAllocation* pAllocations,
5263 size_t allocationCount,
5264 VkBool32* pAllocationsChanged,
5268 VMA_ASSERT(allocator && pAllocations);
5270 VMA_DEBUG_LOG(
"vmaDefragment");
5272 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5274 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
5278 VmaAllocator allocator,
5279 const VkBufferCreateInfo* pCreateInfo,
5282 VmaAllocation* pAllocation,
5285 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements && pBuffer && pAllocation);
5287 VMA_DEBUG_LOG(
"vmaCreateBuffer");
5289 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5291 *pBuffer = VK_NULL_HANDLE;
5292 *pAllocation = VK_NULL_HANDLE;
5295 VkResult res = vkCreateBuffer(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pBuffer);
5299 VkMemoryRequirements vkMemReq = {};
5300 vkGetBufferMemoryRequirements(allocator->m_hDevice, *pBuffer, &vkMemReq);
5303 res = allocator->AllocateMemory(
5305 *pMemoryRequirements,
5306 VMA_SUBALLOCATION_TYPE_BUFFER,
5311 res = vkBindBufferMemory(allocator->m_hDevice, *pBuffer, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
5315 if(pAllocationInfo != VMA_NULL)
5317 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5321 allocator->FreeMemory(*pAllocation);
5322 *pAllocation = VK_NULL_HANDLE;
5325 vkDestroyBuffer(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
5326 *pBuffer = VK_NULL_HANDLE;
5333 VmaAllocator allocator,
5335 VmaAllocation allocation)
5337 if(buffer != VK_NULL_HANDLE)
5339 VMA_ASSERT(allocator);
5341 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
5343 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5345 vkDestroyBuffer(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
5347 allocator->FreeMemory(allocation);
5352 VmaAllocator allocator,
5353 const VkImageCreateInfo* pCreateInfo,
5356 VmaAllocation* pAllocation,
5359 VMA_ASSERT(allocator && pCreateInfo && pMemoryRequirements && pImage && pAllocation);
5361 VMA_DEBUG_LOG(
"vmaCreateImage");
5363 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5366 VkResult res = vkCreateImage(allocator->m_hDevice, pCreateInfo, allocator->GetAllocationCallbacks(), pImage);
5369 VkMappedMemoryRange mem = {};
5370 VmaSuballocationType suballocType = pCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
5371 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
5372 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
5375 res = AllocateMemoryForImage(allocator, *pImage, pMemoryRequirements, suballocType, pAllocation);
5379 res = vkBindImageMemory(allocator->m_hDevice, *pImage, (*pAllocation)->GetMemory(), (*pAllocation)->GetOffset());
5383 if(pAllocationInfo != VMA_NULL)
5385 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
5389 allocator->FreeMemory(*pAllocation);
5390 *pAllocation = VK_NULL_HANDLE;
5393 vkDestroyImage(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
5394 *pImage = VK_NULL_HANDLE;
5401 VmaAllocator allocator,
5403 VmaAllocation allocation)
5405 if(image != VK_NULL_HANDLE)
5407 VMA_ASSERT(allocator);
5409 VMA_DEBUG_LOG(
"vmaDestroyImage");
5411 VMA_DEBUG_GLOBAL_MUTEX_LOCK
5413 vkDestroyImage(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
5415 allocator->FreeMemory(allocation);
5419 #endif // #ifdef VMA_IMPLEMENTATION VmaMemoryRequirementFlagBits
Flags to be passed as VmaMemoryRequirements::flags.
Definition: vk_mem_alloc.h:336
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:345
struct VmaMemoryRequirements VmaMemoryRequirements
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:214
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Memory will be used for writing on device and readback on host.
Definition: vk_mem_alloc.h:331
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:374
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:431
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:567
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
void vmaUnmapPersistentlyMappedMemory(VmaAllocator allocator)
Unmaps persistently mapped memory of types that is HOST_COHERENT and DEVICE_LOCAL.
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:177
const VkAllocationCallbacks * pAllocationCallbacks
Custom allocation callbacks.
Definition: vk_mem_alloc.h:226
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaMemoryRequirements *pMemoryRequirements, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:208
VkDeviceSize preferredSmallHeapBlockSize
Size of a single memory block to allocate for resources from a small heap <= 512 MB.
Definition: vk_mem_alloc.h:223
VmaMemoryRequirementFlags flags
Definition: vk_mem_alloc.h:369
VkFlags VmaAllocatorFlags
Definition: vk_mem_alloc.h:205
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:571
VmaStatInfo total
Definition: vk_mem_alloc.h:284
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:579
VkDeviceSize maxBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:562
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:217
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:441
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:354
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:557
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:575
Definition: vk_mem_alloc.h:363
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:385
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:280
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:383
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:577
VmaMemoryUsage
Definition: vk_mem_alloc.h:317
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:201
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
VmaAllocatorFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:196
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:268
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:188
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:378
Definition: vk_mem_alloc.h:367
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:192
VkResult vmaMapPersistentlyMappedMemory(VmaAllocator allocator)
Maps back persistently mapped memory of types that are HOST_COHERENT and DEVICE_LOCAL.
VkFlags VmaMemoryRequirementFlags
Definition: vk_mem_alloc.h:365
VkDeviceSize UnusedRangeSizeMax
Definition: vk_mem_alloc.h:276
VkDeviceSize SuballocationSizeMax
Definition: vk_mem_alloc.h:275
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:171
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaMemoryRequirements *pVmaMemoryRequirements, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:229
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:282
uint32_t AllocationCount
Definition: vk_mem_alloc.h:270
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:190
VmaAllocatorFlags flags
Flags for created allocator. Use VmaAllocatorFlagBits enum.
Definition: vk_mem_alloc.h:211
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
VkDeviceSize UsedBytes
Definition: vk_mem_alloc.h:273
void * pUserData
Custom general-purpose pointer that was passed as VmaMemoryRequirements::pUserData or set using vmaSe...
Definition: vk_mem_alloc.h:452
VkDeviceSize preferredLargeHeapBlockSize
Size of a single memory block to allocate for resources.
Definition: vk_mem_alloc.h:220
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaMemoryRequirements *pMemoryRequirements, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
uint32_t UnusedRangeCount
Definition: vk_mem_alloc.h:272
Memory will be mapped on host. Could be used for transfer to device.
Definition: vk_mem_alloc.h:325
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
uint32_t SuballocationCount
Definition: vk_mem_alloc.h:271
VkDeviceSize UnusedRangeSizeAvg
Definition: vk_mem_alloc.h:276
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:436
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:573
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkDeviceSize SuballocationSizeMin
Definition: vk_mem_alloc.h:275
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pCreateInfo, const VmaMemoryRequirements *pMemoryRequirements, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
void * pMappedData
Pointer to the beginning of this allocation as mapped data. Null if this allocation is not persistent...
Definition: vk_mem_alloc.h:447
VkDeviceSize SuballocationSizeAvg
Definition: vk_mem_alloc.h:275
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
Set to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:361
No intended memory usage specified.
Definition: vk_mem_alloc.h:320
Definition: vk_mem_alloc.h:332
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:419
Memory will be used for frequent (dynamic) updates from host and reads on device. ...
Definition: vk_mem_alloc.h:328
Definition: vk_mem_alloc.h:203
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
Memory will be used on device only, no need to be mapped on host.
Definition: vk_mem_alloc.h:322
struct VmaStatInfo VmaStatInfo
VkDeviceSize UnusedBytes
Definition: vk_mem_alloc.h:274
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:283
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaMemoryRequirements *pMemoryRequirements, uint32_t *pMemoryTypeIndex)
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
VkDeviceSize UnusedRangeSizeMin
Definition: vk_mem_alloc.h:276
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:424