23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 826 #include <vulkan/vulkan.h> 828 VK_DEFINE_HANDLE(VmaAllocator)
832 VmaAllocator allocator,
834 VkDeviceMemory memory,
838 VmaAllocator allocator,
840 VkDeviceMemory memory,
989 VmaAllocator* pAllocator);
993 VmaAllocator allocator);
1000 VmaAllocator allocator,
1001 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1008 VmaAllocator allocator,
1009 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1018 VmaAllocator allocator,
1019 uint32_t memoryTypeIndex,
1020 VkMemoryPropertyFlags* pFlags);
1031 VmaAllocator allocator,
1032 uint32_t frameIndex);
1062 VmaAllocator allocator,
1065 #define VMA_STATS_STRING_ENABLED 1 1067 #if VMA_STATS_STRING_ENABLED 1073 VmaAllocator allocator,
1074 char** ppStatsString,
1075 VkBool32 detailedMap);
1078 VmaAllocator allocator,
1079 char* pStatsString);
1081 #endif // #if VMA_STATS_STRING_ENABLED 1083 VK_DEFINE_HANDLE(VmaPool)
1266 VmaAllocator allocator,
1267 uint32_t memoryTypeBits,
1269 uint32_t* pMemoryTypeIndex);
1284 VmaAllocator allocator,
1285 const VkBufferCreateInfo* pBufferCreateInfo,
1287 uint32_t* pMemoryTypeIndex);
1302 VmaAllocator allocator,
1303 const VkImageCreateInfo* pImageCreateInfo,
1305 uint32_t* pMemoryTypeIndex);
1406 VmaAllocator allocator,
1413 VmaAllocator allocator,
1423 VmaAllocator allocator,
1434 VmaAllocator allocator,
1436 size_t* pLostAllocationCount);
1438 VK_DEFINE_HANDLE(VmaAllocation)
1494 VmaAllocator allocator,
1495 const VkMemoryRequirements* pVkMemoryRequirements,
1497 VmaAllocation* pAllocation,
1507 VmaAllocator allocator,
1510 VmaAllocation* pAllocation,
1515 VmaAllocator allocator,
1518 VmaAllocation* pAllocation,
1523 VmaAllocator allocator,
1524 VmaAllocation allocation);
1531 VmaAllocator allocator,
1532 VmaAllocation allocation,
1538 VmaAllocator allocator,
1539 VmaAllocation allocation);
1555 VmaAllocator allocator,
1556 VmaAllocation allocation,
1570 VmaAllocator allocator,
1571 VmaAllocation* pAllocation);
1608 VmaAllocator allocator,
1609 VmaAllocation allocation,
1617 VmaAllocator allocator,
1618 VmaAllocation allocation);
1729 VmaAllocator allocator,
1730 VmaAllocation* pAllocations,
1731 size_t allocationCount,
1732 VkBool32* pAllocationsChanged,
1763 VmaAllocator allocator,
1764 const VkBufferCreateInfo* pBufferCreateInfo,
1767 VmaAllocation* pAllocation,
1782 VmaAllocator allocator,
1784 VmaAllocation allocation);
1788 VmaAllocator allocator,
1789 const VkImageCreateInfo* pImageCreateInfo,
1792 VmaAllocation* pAllocation,
1807 VmaAllocator allocator,
1809 VmaAllocation allocation);
1815 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1818 #ifdef __INTELLISENSE__ 1819 #define VMA_IMPLEMENTATION 1822 #ifdef VMA_IMPLEMENTATION 1823 #undef VMA_IMPLEMENTATION 1845 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1846 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1858 #if VMA_USE_STL_CONTAINERS 1859 #define VMA_USE_STL_VECTOR 1 1860 #define VMA_USE_STL_UNORDERED_MAP 1 1861 #define VMA_USE_STL_LIST 1 1864 #if VMA_USE_STL_VECTOR 1868 #if VMA_USE_STL_UNORDERED_MAP 1869 #include <unordered_map> 1872 #if VMA_USE_STL_LIST 1881 #include <algorithm> 1885 #if !defined(_WIN32) && !defined(__APPLE__) 1891 #define VMA_NULL nullptr 1894 #if defined(__APPLE__) 1896 void *aligned_alloc(
size_t alignment,
size_t size)
{
    // posix_memalign() requires the alignment to be a power-of-two
    // multiple of sizeof(void*), so clamp small alignments up.
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }
    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
    {
        return pointer;
    }
    // Allocation failed (bad alignment or out of memory).
    return VMA_NULL;
}
1914 #define VMA_ASSERT(expr) assert(expr) 1916 #define VMA_ASSERT(expr) 1922 #ifndef VMA_HEAVY_ASSERT 1924 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1926 #define VMA_HEAVY_ASSERT(expr) 1930 #ifndef VMA_ALIGN_OF 1931 #define VMA_ALIGN_OF(type) (__alignof(type)) 1934 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1936 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1938 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1942 #ifndef VMA_SYSTEM_FREE 1944 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1946 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1951 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1955 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1959 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1963 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1966 #ifndef VMA_DEBUG_LOG 1967 #define VMA_DEBUG_LOG(format, ...) 1977 #if VMA_STATS_STRING_ENABLED 1978 static inline void VmaUint32ToStr(
char* outStr,
    size_t strLen,
    uint32_t num)
{
    // Format num as unsigned decimal; snprintf NUL-terminates when strLen > 0.
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
// Formats `num` as unsigned decimal text into `outStr` (buffer capacity `strLen`).
// The cast to unsigned long long matches the "%llu" conversion specifier exactly,
// regardless of how uint64_t is defined on the platform.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into `outStr` (buffer capacity `strLen`) using the
// implementation-defined "%p" representation.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1998 void Lock() { m_Mutex.lock(); }
1999 void Unlock() { m_Mutex.unlock(); }
2003 #define VMA_MUTEX VmaMutex 2014 #ifndef VMA_ATOMIC_UINT32 2015 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2018 #ifndef VMA_BEST_FIT 2031 #define VMA_BEST_FIT (1) 2034 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2039 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2042 #ifndef VMA_DEBUG_ALIGNMENT 2047 #define VMA_DEBUG_ALIGNMENT (1) 2050 #ifndef VMA_DEBUG_MARGIN 2055 #define VMA_DEBUG_MARGIN (0) 2058 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2063 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2066 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2071 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2074 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2075 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2079 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2080 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2084 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2090 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2091 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
2094 static inline uint32_t VmaCountBitsSet(uint32_t v)
2096 uint32_t c = v - ((v >> 1) & 0x55555555);
2097 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
2098 c = ((c >> 4) + c) & 0x0F0F0F0F;
2099 c = ((c >> 8) + c) & 0x00FF00FF;
2100 c = ((c >> 16) + c) & 0x0000FFFF;
// Rounds `val` up to the nearest multiple of `align`. Uses integer division
// rather than bit masking, so it works for any positive `align`, not only
// powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T blocks = (val + align - 1) / align;
    return blocks * align;
}
// Division of `x` by `y` with rounding to the nearest integer (halves round
// upward for non-negative operands, since y/2 is added before truncation).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
/*
Lomuto-style partition step for VmaQuickSort: uses the last element as the
pivot, moves all elements for which cmp(elem, pivot) holds before it, and
returns an iterator to the pivot's final position.
*/
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            // Advance the boundary of the "less than pivot" region; this
            // increment was dropped from the extracted view — restored.
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    // Return of the pivot position was dropped from the extracted view — restored.
    return insertIndex;
}
2144 template<
typename Iterator,
typename Compare>
2145 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2149 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2150 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2151 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2155 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2157 #endif // #ifndef VMA_SORT 2166 static inline bool VmaBlocksOnSamePage(
2167 VkDeviceSize resourceAOffset,
2168 VkDeviceSize resourceASize,
2169 VkDeviceSize resourceBOffset,
2170 VkDeviceSize pageSize)
2172 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2173 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2174 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2175 VkDeviceSize resourceBStart = resourceBOffset;
2176 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2177 return resourceAEndPage == resourceBStartPage;
// Classifies what a suballocation within a device memory block holds.
// NOTE(review): the numeric ordering appears significant —
// VmaIsBufferImageGranularityConflict() normalizes its arguments by swapping
// when the first value is greater than the second.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces a 32-bit underlying type, matching Vulkan enum convention.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2197 static inline bool VmaIsBufferImageGranularityConflict(
2198 VmaSuballocationType suballocType1,
2199 VmaSuballocationType suballocType2)
2201 if(suballocType1 > suballocType2)
2203 VMA_SWAP(suballocType1, suballocType2);
2206 switch(suballocType1)
2208 case VMA_SUBALLOCATION_TYPE_FREE:
2210 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2212 case VMA_SUBALLOCATION_TYPE_BUFFER:
2214 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2215 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2216 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2218 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2219 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2220 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2221 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2223 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2224 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2236 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2237 m_pMutex(useMutex ? &mutex : VMA_NULL)
2254 VMA_MUTEX* m_pMutex;
2257 #if VMA_DEBUG_GLOBAL_MUTEX 2258 static VMA_MUTEX gDebugGlobalMutex;
2259 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2261 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2265 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is NOT less than `key` according to `cmp`
(std::lower_bound semantics), or `end` if every element is less than `key`.
The loop body and return were dropped from the extracted view — restored.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // *(beg+mid) < key: answer lies strictly above mid
        }
        else
        {
            up = mid; // *(beg+mid) >= key: mid is a candidate
        }
    }
    return beg + down;
}
2298 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2300 if((pAllocationCallbacks != VMA_NULL) &&
2301 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2303 return (*pAllocationCallbacks->pfnAllocation)(
2304 pAllocationCallbacks->pUserData,
2307 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2311 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2315 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2317 if((pAllocationCallbacks != VMA_NULL) &&
2318 (pAllocationCallbacks->pfnFree != VMA_NULL))
2320 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2324 VMA_SYSTEM_FREE(ptr);
2328 template<
typename T>
2329 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2331 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2334 template<
typename T>
2335 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2337 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2340 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2342 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2344 template<
typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    // Mirror of vma_new(): the object was constructed with placement-new,
    // so run the destructor explicitly before releasing the raw storage.
    // The destructor call was dropped from the extracted view — restored.
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}
2351 template<
typename T>
2352 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2356 for(
size_t i = count; i--; )
2360 VmaFree(pAllocationCallbacks, ptr);
2365 template<
typename T>
2366 class VmaStlAllocator
2369 const VkAllocationCallbacks*
const m_pCallbacks;
2370 typedef T value_type;
2372 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2373 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2375 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2376 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2378 template<
typename U>
2379 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2381 return m_pCallbacks == rhs.m_pCallbacks;
2383 template<
typename U>
2384 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2386 return m_pCallbacks != rhs.m_pCallbacks;
2389 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2392 #if VMA_USE_STL_VECTOR 2394 #define VmaVector std::vector 2396 template<
typename T,
typename allocatorT>
2397 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2399 vec.insert(vec.begin() + index, item);
2402 template<
typename T,
typename allocatorT>
2403 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2405 vec.erase(vec.begin() + index);
2408 #else // #if VMA_USE_STL_VECTOR 2413 template<
typename T,
typename AllocatorT>
2417 typedef T value_type;
2419 VmaVector(
const AllocatorT& allocator) :
2420 m_Allocator(allocator),
2427 VmaVector(
size_t count,
const AllocatorT& allocator) :
2428 m_Allocator(allocator),
2429 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2435 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2436 m_Allocator(src.m_Allocator),
2437 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2438 m_Count(src.m_Count),
2439 m_Capacity(src.m_Count)
2443 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2449 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2452 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2456 resize(rhs.m_Count);
2459 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2465 bool empty()
const {
return m_Count == 0; }
2466 size_t size()
const {
return m_Count; }
2467 T* data() {
return m_pArray; }
2468 const T* data()
const {
return m_pArray; }
2470 T& operator[](
size_t index)
2472 VMA_HEAVY_ASSERT(index < m_Count);
2473 return m_pArray[index];
2475 const T& operator[](
size_t index)
const 2477 VMA_HEAVY_ASSERT(index < m_Count);
2478 return m_pArray[index];
2483 VMA_HEAVY_ASSERT(m_Count > 0);
2486 const T& front()
const 2488 VMA_HEAVY_ASSERT(m_Count > 0);
2493 VMA_HEAVY_ASSERT(m_Count > 0);
2494 return m_pArray[m_Count - 1];
2496 const T& back()
const 2498 VMA_HEAVY_ASSERT(m_Count > 0);
2499 return m_pArray[m_Count - 1];
2502 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2504 newCapacity = VMA_MAX(newCapacity, m_Count);
2506 if((newCapacity < m_Capacity) && !freeMemory)
2508 newCapacity = m_Capacity;
2511 if(newCapacity != m_Capacity)
2513 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2516 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2518 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2519 m_Capacity = newCapacity;
2520 m_pArray = newArray;
2524 void resize(
size_t newCount,
bool freeMemory =
false)
2526 size_t newCapacity = m_Capacity;
2527 if(newCount > m_Capacity)
2529 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2533 newCapacity = newCount;
2536 if(newCapacity != m_Capacity)
2538 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2539 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2540 if(elementsToCopy != 0)
2542 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2544 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2545 m_Capacity = newCapacity;
2546 m_pArray = newArray;
2552 void clear(
bool freeMemory =
false)
2554 resize(0, freeMemory);
2557 void insert(
size_t index,
const T& src)
2559 VMA_HEAVY_ASSERT(index <= m_Count);
2560 const size_t oldCount = size();
2561 resize(oldCount + 1);
2562 if(index < oldCount)
2564 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2566 m_pArray[index] = src;
2569 void remove(
size_t index)
2571 VMA_HEAVY_ASSERT(index < m_Count);
2572 const size_t oldCount = size();
2573 if(index < oldCount - 1)
2575 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2577 resize(oldCount - 1);
2580 void push_back(
const T& src)
2582 const size_t newIndex = size();
2583 resize(newIndex + 1);
2584 m_pArray[newIndex] = src;
2589 VMA_HEAVY_ASSERT(m_Count > 0);
2593 void push_front(
const T& src)
2600 VMA_HEAVY_ASSERT(m_Count > 0);
2604 typedef T* iterator;
2606 iterator begin() {
return m_pArray; }
2607 iterator end() {
return m_pArray + m_Count; }
2610 AllocatorT m_Allocator;
2616 template<
typename T,
typename allocatorT>
2617 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2619 vec.insert(index, item);
2622 template<
typename T,
typename allocatorT>
2623 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2628 #endif // #if VMA_USE_STL_VECTOR 2630 template<
typename CmpLess,
typename VectorT>
2631 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2633 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2635 vector.data() + vector.size(),
2637 CmpLess()) - vector.data();
2638 VmaVectorInsert(vector, indexToInsert, value);
2639 return indexToInsert;
2642 template<
typename CmpLess,
typename VectorT>
2643 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2646 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2651 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2653 size_t indexToRemove = it - vector.begin();
2654 VmaVectorRemove(vector, indexToRemove);
2660 template<
typename CmpLess,
typename VectorT>
2661 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2664 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2666 vector.data() + vector.size(),
2669 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2671 return it - vector.begin();
2675 return vector.size();
2687 template<
typename T>
2688 class VmaPoolAllocator
2691 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2692 ~VmaPoolAllocator();
2700 uint32_t NextFreeIndex;
2707 uint32_t FirstFreeIndex;
2710 const VkAllocationCallbacks* m_pAllocationCallbacks;
2711 size_t m_ItemsPerBlock;
2712 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2714 ItemBlock& CreateNewBlock();
2717 template<
typename T>
2718 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2719 m_pAllocationCallbacks(pAllocationCallbacks),
2720 m_ItemsPerBlock(itemsPerBlock),
2721 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2723 VMA_ASSERT(itemsPerBlock > 0);
2726 template<
typename T>
2727 VmaPoolAllocator<T>::~VmaPoolAllocator()
2732 template<
typename T>
2733 void VmaPoolAllocator<T>::Clear()
2735 for(
size_t i = m_ItemBlocks.size(); i--; )
2736 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2737 m_ItemBlocks.clear();
2740 template<
typename T>
2741 T* VmaPoolAllocator<T>::Alloc()
2743 for(
size_t i = m_ItemBlocks.size(); i--; )
2745 ItemBlock& block = m_ItemBlocks[i];
2747 if(block.FirstFreeIndex != UINT32_MAX)
2749 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2750 block.FirstFreeIndex = pItem->NextFreeIndex;
2751 return &pItem->Value;
2756 ItemBlock& newBlock = CreateNewBlock();
2757 Item*
const pItem = &newBlock.pItems[0];
2758 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2759 return &pItem->Value;
2762 template<
typename T>
2763 void VmaPoolAllocator<T>::Free(T* ptr)
2766 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2768 ItemBlock& block = m_ItemBlocks[i];
2772 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2775 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2777 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2778 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2779 block.FirstFreeIndex = index;
2783 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2786 template<
typename T>
2787 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2789 ItemBlock newBlock = {
2790 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2792 m_ItemBlocks.push_back(newBlock);
2795 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2796 newBlock.pItems[i].NextFreeIndex = i + 1;
2797 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2798 return m_ItemBlocks.back();
2804 #if VMA_USE_STL_LIST 2806 #define VmaList std::list 2808 #else // #if VMA_USE_STL_LIST 2810 template<
typename T>
2819 template<
typename T>
2823 typedef VmaListItem<T> ItemType;
2825 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2829 size_t GetCount()
const {
return m_Count; }
2830 bool IsEmpty()
const {
return m_Count == 0; }
2832 ItemType* Front() {
return m_pFront; }
2833 const ItemType* Front()
const {
return m_pFront; }
2834 ItemType* Back() {
return m_pBack; }
2835 const ItemType* Back()
const {
return m_pBack; }
2837 ItemType* PushBack();
2838 ItemType* PushFront();
2839 ItemType* PushBack(
const T& value);
2840 ItemType* PushFront(
const T& value);
2845 ItemType* InsertBefore(ItemType* pItem);
2847 ItemType* InsertAfter(ItemType* pItem);
2849 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2850 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2852 void Remove(ItemType* pItem);
2855 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2856 VmaPoolAllocator<ItemType> m_ItemAllocator;
2862 VmaRawList(
const VmaRawList<T>& src);
2863 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2866 template<
typename T>
2867 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2868 m_pAllocationCallbacks(pAllocationCallbacks),
2869 m_ItemAllocator(pAllocationCallbacks, 128),
2876 template<
typename T>
2877 VmaRawList<T>::~VmaRawList()
2883 template<
typename T>
2884 void VmaRawList<T>::Clear()
2886 if(IsEmpty() ==
false)
2888 ItemType* pItem = m_pBack;
2889 while(pItem != VMA_NULL)
2891 ItemType*
const pPrevItem = pItem->pPrev;
2892 m_ItemAllocator.Free(pItem);
2895 m_pFront = VMA_NULL;
2901 template<
typename T>
2902 VmaListItem<T>* VmaRawList<T>::PushBack()
2904 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2905 pNewItem->pNext = VMA_NULL;
2908 pNewItem->pPrev = VMA_NULL;
2909 m_pFront = pNewItem;
2915 pNewItem->pPrev = m_pBack;
2916 m_pBack->pNext = pNewItem;
2923 template<
typename T>
2924 VmaListItem<T>* VmaRawList<T>::PushFront()
2926 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2927 pNewItem->pPrev = VMA_NULL;
2930 pNewItem->pNext = VMA_NULL;
2931 m_pFront = pNewItem;
2937 pNewItem->pNext = m_pFront;
2938 m_pFront->pPrev = pNewItem;
2939 m_pFront = pNewItem;
2945 template<
typename T>
2946 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2948 ItemType*
const pNewItem = PushBack();
2949 pNewItem->Value = value;
2953 template<
typename T>
2954 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2956 ItemType*
const pNewItem = PushFront();
2957 pNewItem->Value = value;
2961 template<
typename T>
2962 void VmaRawList<T>::PopBack()
2964 VMA_HEAVY_ASSERT(m_Count > 0);
2965 ItemType*
const pBackItem = m_pBack;
2966 ItemType*
const pPrevItem = pBackItem->pPrev;
2967 if(pPrevItem != VMA_NULL)
2969 pPrevItem->pNext = VMA_NULL;
2971 m_pBack = pPrevItem;
2972 m_ItemAllocator.Free(pBackItem);
2976 template<
typename T>
2977 void VmaRawList<T>::PopFront()
2979 VMA_HEAVY_ASSERT(m_Count > 0);
2980 ItemType*
const pFrontItem = m_pFront;
2981 ItemType*
const pNextItem = pFrontItem->pNext;
2982 if(pNextItem != VMA_NULL)
2984 pNextItem->pPrev = VMA_NULL;
2986 m_pFront = pNextItem;
2987 m_ItemAllocator.Free(pFrontItem);
2991 template<
typename T>
2992 void VmaRawList<T>::Remove(ItemType* pItem)
2994 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2995 VMA_HEAVY_ASSERT(m_Count > 0);
2997 if(pItem->pPrev != VMA_NULL)
2999 pItem->pPrev->pNext = pItem->pNext;
3003 VMA_HEAVY_ASSERT(m_pFront == pItem);
3004 m_pFront = pItem->pNext;
3007 if(pItem->pNext != VMA_NULL)
3009 pItem->pNext->pPrev = pItem->pPrev;
3013 VMA_HEAVY_ASSERT(m_pBack == pItem);
3014 m_pBack = pItem->pPrev;
3017 m_ItemAllocator.Free(pItem);
3021 template<
typename T>
3022 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3024 if(pItem != VMA_NULL)
3026 ItemType*
const prevItem = pItem->pPrev;
3027 ItemType*
const newItem = m_ItemAllocator.Alloc();
3028 newItem->pPrev = prevItem;
3029 newItem->pNext = pItem;
3030 pItem->pPrev = newItem;
3031 if(prevItem != VMA_NULL)
3033 prevItem->pNext = newItem;
3037 VMA_HEAVY_ASSERT(m_pFront == pItem);
3047 template<
typename T>
3048 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3050 if(pItem != VMA_NULL)
3052 ItemType*
const nextItem = pItem->pNext;
3053 ItemType*
const newItem = m_ItemAllocator.Alloc();
3054 newItem->pNext = nextItem;
3055 newItem->pPrev = pItem;
3056 pItem->pNext = newItem;
3057 if(nextItem != VMA_NULL)
3059 nextItem->pPrev = newItem;
3063 VMA_HEAVY_ASSERT(m_pBack == pItem);
3073 template<
typename T>
3074 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3076 ItemType*
const newItem = InsertBefore(pItem);
3077 newItem->Value = value;
3081 template<
typename T>
3082 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3084 ItemType*
const newItem = InsertAfter(pItem);
3085 newItem->Value = value;
3089 template<
typename T,
typename AllocatorT>
3102 T& operator*()
const 3104 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3105 return m_pItem->Value;
3107 T* operator->()
const 3109 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3110 return &m_pItem->Value;
3113 iterator& operator++()
3115 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3116 m_pItem = m_pItem->pNext;
3119 iterator& operator--()
3121 if(m_pItem != VMA_NULL)
3123 m_pItem = m_pItem->pPrev;
3127 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3128 m_pItem = m_pList->Back();
3133 iterator operator++(
int)
3135 iterator result = *
this;
3139 iterator operator--(
int)
3141 iterator result = *
this;
3146 bool operator==(
const iterator& rhs)
const 3148 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3149 return m_pItem == rhs.m_pItem;
3151 bool operator!=(
const iterator& rhs)
const 3153 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3154 return m_pItem != rhs.m_pItem;
3158 VmaRawList<T>* m_pList;
3159 VmaListItem<T>* m_pItem;
3161 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3167 friend class VmaList<T, AllocatorT>;
3170 class const_iterator
3179 const_iterator(
const iterator& src) :
3180 m_pList(src.m_pList),
3181 m_pItem(src.m_pItem)
3185 const T& operator*()
const 3187 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3188 return m_pItem->Value;
3190 const T* operator->()
const 3192 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3193 return &m_pItem->Value;
3196 const_iterator& operator++()
3198 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3199 m_pItem = m_pItem->pNext;
3202 const_iterator& operator--()
3204 if(m_pItem != VMA_NULL)
3206 m_pItem = m_pItem->pPrev;
3210 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3211 m_pItem = m_pList->Back();
3216 const_iterator operator++(
int)
3218 const_iterator result = *
this;
3222 const_iterator operator--(
int)
3224 const_iterator result = *
this;
3229 bool operator==(
const const_iterator& rhs)
const 3231 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3232 return m_pItem == rhs.m_pItem;
3234 bool operator!=(
const const_iterator& rhs)
const 3236 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3237 return m_pItem != rhs.m_pItem;
3241 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3247 const VmaRawList<T>* m_pList;
3248 const VmaListItem<T>* m_pItem;
3250 friend class VmaList<T, AllocatorT>;
3253 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3255 bool empty()
const {
return m_RawList.IsEmpty(); }
3256 size_t size()
const {
return m_RawList.GetCount(); }
3258 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3259 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3261 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3262 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3264 void clear() { m_RawList.Clear(); }
3265 void push_back(
const T& value) { m_RawList.PushBack(value); }
3266 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3267 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3270 VmaRawList<T> m_RawList;
3273 #endif // #if VMA_USE_STL_LIST 3281 #if VMA_USE_STL_UNORDERED_MAP 3283 #define VmaPair std::pair 3285 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3286 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3288 #else // #if VMA_USE_STL_UNORDERED_MAP 3290 template<
typename T1,
typename T2>
3296 VmaPair() : first(), second() { }
3297 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3303 template<
typename KeyT,
typename ValueT>
3307 typedef VmaPair<KeyT, ValueT> PairType;
3308 typedef PairType* iterator;
3310 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3312 iterator begin() {
return m_Vector.begin(); }
3313 iterator end() {
return m_Vector.end(); }
3315 void insert(
const PairType& pair);
3316 iterator find(
const KeyT& key);
3317 void erase(iterator it);
3320 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3323 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3325 template<
typename FirstT,
typename SecondT>
3326 struct VmaPairFirstLess
3328 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3330 return lhs.first < rhs.first;
3332 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3334 return lhs.first < rhsFirst;
3338 template<
typename KeyT,
typename ValueT>
3339 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3341 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3343 m_Vector.data() + m_Vector.size(),
3345 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3346 VmaVectorInsert(m_Vector, indexToInsert, pair);
3349 template<
typename KeyT,
typename ValueT>
3350 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3352 PairType* it = VmaBinaryFindFirstNotLess(
3354 m_Vector.data() + m_Vector.size(),
3356 VmaPairFirstLess<KeyT, ValueT>());
3357 if((it != m_Vector.end()) && (it->first == key))
3363 return m_Vector.end();
3367 template<
typename KeyT,
typename ValueT>
3368 void VmaMap<KeyT, ValueT>::erase(iterator it)
3370 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3373 #endif // #if VMA_USE_STL_UNORDERED_MAP 3379 class VmaDeviceMemoryBlock;
// VmaAllocation_T: internal representation behind the opaque VmaAllocation
// handle. An allocation is either part of a VmaDeviceMemoryBlock
// (ALLOCATION_TYPE_BLOCK) or its own dedicated VkDeviceMemory
// (ALLOCATION_TYPE_DEDICATED); the two cases share storage via the
// m_BlockAllocation / m_DedicatedAllocation members (presumably a union in
// the full source — TODO confirm, lines are missing here).
// NOTE(review): lossy extraction; braces and some lines absent. Code left
// byte-identical.
3381 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation; the low
// 7 bits are the user map reference count (see BlockAllocMap below).
3384 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
// When set, m_pUserData is an owned, heap-copied string (see SetUserData).
3388 FLAG_USER_DATA_STRING = 0x01,
3392 enum ALLOCATION_TYPE
3394 ALLOCATION_TYPE_NONE,
3395 ALLOCATION_TYPE_BLOCK,
3396 ALLOCATION_TYPE_DEDICATED,
// Constructor: starts as ALLOCATION_TYPE_NONE; one of the Init* methods
// below must be called before use.
3399 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3402 m_pUserData(VMA_NULL),
3403 m_LastUseFrameIndex(currentFrameIndex),
3404 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3405 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3407 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
// Destructor-time invariants: all user Map() calls were balanced by Unmap(),
// and user data was cleared (SetUserData(…, VMA_NULL)).
3413 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3416 VMA_ASSERT(m_pUserData == VMA_NULL);
// Initialize as a suballocation inside an existing device-memory block.
3419 void InitBlockAllocation(
3421 VmaDeviceMemoryBlock* block,
3422 VkDeviceSize offset,
3423 VkDeviceSize alignment,
3425 VmaSuballocationType suballocationType,
3429 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3430 VMA_ASSERT(block != VMA_NULL);
3431 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3432 m_Alignment = alignment;
3434 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3435 m_SuballocationType = (uint8_t)suballocationType;
3436 m_BlockAllocation.m_hPool = hPool;
3437 m_BlockAllocation.m_Block = block;
3438 m_BlockAllocation.m_Offset = offset;
3439 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initialize as an already-lost allocation (frame index must equal
// VMA_FRAME_INDEX_LOST); has no backing block.
3444 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3445 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3446 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3447 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3448 m_BlockAllocation.m_Block = VMA_NULL;
3449 m_BlockAllocation.m_Offset = 0;
3450 m_BlockAllocation.m_CanBecomeLost =
true;
// Re-point a block allocation at a new block/offset (used by defragmentation);
// defined out-of-line below.
3453 void ChangeBlockAllocation(
3454 VmaAllocator hAllocator,
3455 VmaDeviceMemoryBlock* block,
3456 VkDeviceSize offset);
// Initialize as a dedicated VkDeviceMemory allocation.
3459 void InitDedicatedAllocation(
3460 uint32_t memoryTypeIndex,
3461 VkDeviceMemory hMemory,
3462 VmaSuballocationType suballocationType,
3466 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3467 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3468 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3471 m_SuballocationType = (uint8_t)suballocationType;
3472 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3473 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3474 m_DedicatedAllocation.m_hMemory = hMemory;
3475 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple inline accessors.
3478 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3479 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3480 VkDeviceSize GetSize()
const {
return m_Size; }
3481 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3482 void* GetUserData()
const {
return m_pUserData; }
3483 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3484 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for block allocations (asserted).
3486 VmaDeviceMemoryBlock* GetBlock()
const 3488 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3489 return m_BlockAllocation.m_Block;
3491 VkDeviceSize GetOffset()
const;
3492 VkDeviceMemory GetMemory()
const;
3493 uint32_t GetMemoryTypeIndex()
const;
3494 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3495 void* GetMappedData()
const;
3496 bool CanBecomeLost()
const;
3497 VmaPool GetPool()
const;
// Lost-allocation bookkeeping: last-use frame index is atomic so multiple
// threads may touch/lose an allocation concurrently.
3499 uint32_t GetLastUseFrameIndex()
const 3501 return m_LastUseFrameIndex.load();
3503 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3505 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3515 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3517 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3519 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers, split by allocation type; defined out-of-line below.
3530 void BlockAllocMap();
3531 void BlockAllocUnmap();
3532 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3533 void DedicatedAllocUnmap(VmaAllocator hAllocator);
// Data members (several, e.g. m_MapCount/m_Flags declarations, are among the
// extraction's missing lines).
3536 VkDeviceSize m_Alignment;
3537 VkDeviceSize m_Size;
3539 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3541 uint8_t m_SuballocationType;
// State specific to ALLOCATION_TYPE_BLOCK.
3548 struct BlockAllocation
3551 VmaDeviceMemoryBlock* m_Block;
3552 VkDeviceSize m_Offset;
3553 bool m_CanBecomeLost;
// State specific to ALLOCATION_TYPE_DEDICATED.
3557 struct DedicatedAllocation
3559 uint32_t m_MemoryTypeIndex;
3560 VkDeviceMemory m_hMemory;
3561 void* m_pMappedData;
3567 BlockAllocation m_BlockAllocation;
3569 DedicatedAllocation m_DedicatedAllocation;
// Frees the owned string copy held in m_pUserData (FLAG_USER_DATA_STRING).
3572 void FreeUserDataString(VmaAllocator hAllocator);
// VmaSuballocation: one region of a device-memory block — either free
// (type == VMA_SUBALLOCATION_TYPE_FREE, hAllocation == VK_NULL_HANDLE) or
// owned by an allocation. NOTE(review): lossy extraction; code byte-identical.
3579 struct VmaSuballocation
3581 VkDeviceSize offset;
3583 VmaAllocation hAllocation;
3584 VmaSuballocationType type;
3587 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost in "bytes" charged per allocation that must be made lost to satisfy a
// request — biases CalcCost against losing many small allocations.
3590 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// VmaAllocationRequest: candidate placement produced by
// VmaBlockMetadata::CreateAllocationRequest; lower CalcCost() is better.
3605 struct VmaAllocationRequest
3607 VkDeviceSize offset;
3608 VkDeviceSize sumFreeSize;
3609 VkDeviceSize sumItemSize;
3610 VmaSuballocationList::iterator item;
3611 size_t itemsToMakeLostCount;
3613 VkDeviceSize CalcCost()
const 3615 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: bookkeeping for the suballocations inside one
// VkDeviceMemory block — a linked list of regions ordered by offset plus a
// vector of free regions sorted by size for best-fit search.
// NOTE(review): lossy extraction; access specifiers/braces missing. Code
// left byte-identical.
3623 class VmaBlockMetadata
3626 VmaBlockMetadata(VmaAllocator hAllocator);
3627 ~VmaBlockMetadata();
3628 void Init(VkDeviceSize size);
// Consistency check over all internal structures (see definition below).
3631 bool Validate()
const;
3632 VkDeviceSize GetSize()
const {
return m_Size; }
3633 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3634 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3635 VkDeviceSize GetUnusedRangeSizeMax()
const;
3637 bool IsEmpty()
const;
3639 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3642 #if VMA_STATS_STRING_ENABLED 3643 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3647 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Try to find a place for an allocation of the given size/alignment/type;
// may propose making other (lost-able) allocations lost.
3652 bool CreateAllocationRequest(
3653 uint32_t currentFrameIndex,
3654 uint32_t frameInUseCount,
3655 VkDeviceSize bufferImageGranularity,
3656 VkDeviceSize allocSize,
3657 VkDeviceSize allocAlignment,
3658 VmaSuballocationType allocType,
3659 bool canMakeOtherLost,
3660 VmaAllocationRequest* pAllocationRequest);
3662 bool MakeRequestedAllocationsLost(
3663 uint32_t currentFrameIndex,
3664 uint32_t frameInUseCount,
3665 VmaAllocationRequest* pAllocationRequest);
3667 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Commit a previously created request (the Alloc signature's first lines are
// among the extraction's missing lines).
3671 const VmaAllocationRequest& request,
3672 VmaSuballocationType type,
3673 VkDeviceSize allocSize,
3674 VmaAllocation hAllocation);
3677 void Free(
const VmaAllocation allocation);
3678 void FreeAtOffset(VkDeviceSize offset);
// m_Suballocations: all regions by offset; m_FreeSuballocationsBySize: free
// regions >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, sorted by size.
3681 VkDeviceSize m_Size;
3682 uint32_t m_FreeCount;
3683 VkDeviceSize m_SumFreeSize;
3684 VmaSuballocationList m_Suballocations;
3687 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3689 bool ValidateFreeSuballocationList()
const;
// Check whether an allocation fits at/after suballocItem, honoring
// bufferImageGranularity; outputs offset and make-lost accounting.
3693 bool CheckAllocation(
3694 uint32_t currentFrameIndex,
3695 uint32_t frameInUseCount,
3696 VkDeviceSize bufferImageGranularity,
3697 VkDeviceSize allocSize,
3698 VkDeviceSize allocAlignment,
3699 VmaSuballocationType allocType,
3700 VmaSuballocationList::const_iterator suballocItem,
3701 bool canMakeOtherLost,
3702 VkDeviceSize* pOffset,
3703 size_t* itemsToMakeLostCount,
3704 VkDeviceSize* pSumFreeSize,
3705 VkDeviceSize* pSumItemSize)
const;
3707 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3711 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3714 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3717 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaDeviceMemoryMapping: reference-counted vkMapMemory wrapper shared by all
// allocations in one block (count-based Map/Unmap).
// NOTE(review): lossy extraction; code byte-identical.
3721 class VmaDeviceMemoryMapping
3724 VmaDeviceMemoryMapping();
3725 ~VmaDeviceMemoryMapping();
3727 void* GetMappedData()
const {
return m_pMappedData; }
3730 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3731 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3735 uint32_t m_MapCount;
3736 void* m_pMappedData;
// VmaDeviceMemoryBlock: one VkDeviceMemory plus its suballocation metadata
// and shared mapping state.
3745 class VmaDeviceMemoryBlock
3748 uint32_t m_MemoryTypeIndex;
3749 VkDeviceMemory m_hMemory;
3750 VmaDeviceMemoryMapping m_Mapping;
3751 VmaBlockMetadata m_Metadata;
3753 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor invariant: Destroy() was called (memory handle already released).
3755 ~VmaDeviceMemoryBlock()
3757 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init(...) signature — its first lines are among the extraction's gaps.
3762 uint32_t newMemoryTypeIndex,
3763 VkDeviceMemory newMemory,
3764 VkDeviceSize newSize);
3766 void Destroy(VmaAllocator allocator);
3769 bool Validate()
const;
3772 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3773 void Unmap(VmaAllocator hAllocator, uint32_t count);
// VmaPointerLess: raw-pointer ordering functor (body lost in extraction;
// presumably `return lhs < rhs;` — TODO confirm against full source).
3776 struct VmaPointerLess
3778 bool operator()(
const void* lhs,
const void* rhs)
const 3784 class VmaDefragmentator;
// VmaBlockVector: the set of VmaDeviceMemoryBlocks for one memory type
// (either default heap or owned by a custom VmaPool — see m_IsCustomPool).
// NOTE(review): lossy extraction; the constructor name line and several
// method headers (Allocate/Free) are among the missing lines. Code left
// byte-identical.
3792 struct VmaBlockVector
3795 VmaAllocator hAllocator,
3796 uint32_t memoryTypeIndex,
3797 VkDeviceSize preferredBlockSize,
3798 size_t minBlockCount,
3799 size_t maxBlockCount,
3800 VkDeviceSize bufferImageGranularity,
3801 uint32_t frameInUseCount,
3805 VkResult CreateMinBlocks();
3807 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3808 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3809 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3810 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3814 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Remainder of the Allocate(...) parameter list (header line missing).
3817 VmaPool hCurrentPool,
3818 uint32_t currentFrameIndex,
3819 const VkMemoryRequirements& vkMemReq,
3821 VmaSuballocationType suballocType,
3822 VmaAllocation* pAllocation);
3825 VmaAllocation hAllocation);
3830 #if VMA_STATS_STRING_ENABLED 3831 void PrintDetailedMap(
class VmaJsonWriter& json);
3834 void MakePoolAllocationsLost(
3835 uint32_t currentFrameIndex,
3836 size_t* pLostAllocationCount);
// Lazily creates the defragmentator for this vector.
3838 VmaDefragmentator* EnsureDefragmentator(
3839 VmaAllocator hAllocator,
3840 uint32_t currentFrameIndex);
3842 VkResult Defragment(
3844 VkDeviceSize& maxBytesToMove,
3845 uint32_t& maxAllocationsToMove);
3847 void DestroyDefragmentator();
3850 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
3852 const VmaAllocator m_hAllocator;
3853 const uint32_t m_MemoryTypeIndex;
3854 const VkDeviceSize m_PreferredBlockSize;
3855 const size_t m_MinBlockCount;
3856 const size_t m_MaxBlockCount;
3857 const VkDeviceSize m_BufferImageGranularity;
3858 const uint32_t m_FrameInUseCount;
3859 const bool m_IsCustomPool;
// Blocks ordered (incrementally) by sum free size — see
// IncrementallySortBlocks().
3862 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3866 bool m_HasEmptyBlock;
3867 VmaDefragmentator* m_pDefragmentator;
3869 size_t CalcMaxBlockSize()
const;
3872 void Remove(VmaDeviceMemoryBlock* pBlock);
3876 void IncrementallySortBlocks();
3878 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): the following members belong to VmaPool_T (struct header is
// among the extraction's missing lines): its embedded block vector.
3884 VmaBlockVector m_BlockVector;
3888 VmaAllocator hAllocator,
3892 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// VmaDefragmentator: moves allocations between blocks of one VmaBlockVector
// to compact memory, within maxBytesToMove/maxAllocationsToMove budgets.
// NOTE(review): lossy extraction; code byte-identical.
3894 #if VMA_STATS_STRING_ENABLED 3899 class VmaDefragmentator
3901 const VmaAllocator m_hAllocator;
3902 VmaBlockVector*
const m_pBlockVector;
3903 uint32_t m_CurrentFrameIndex;
3904 VkDeviceSize m_BytesMoved;
3905 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set when the
// allocation is actually moved.
3907 struct AllocationInfo
3909 VmaAllocation m_hAllocation;
3910 VkBool32* m_pChanged;
3913 m_hAllocation(VK_NULL_HANDLE),
3914 m_pChanged(VMA_NULL)
// Comparator: larger allocations first.
3919 struct AllocationInfoSizeGreater
3921 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3923 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3928 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (BlockInfo — its struct header line is missing).
3932 VmaDeviceMemoryBlock* m_pBlock;
3933 bool m_HasNonMovableAllocations;
3934 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3936 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3938 m_HasNonMovableAllocations(true),
3939 m_Allocations(pAllocationCallbacks),
3940 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations iff not all of its allocations were
// registered with AddAllocation().
3944 void CalcHasNonMovableAllocations()
3946 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3947 const size_t defragmentAllocCount = m_Allocations.size();
3948 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (Typo "Descecnding" is the original method name — callers use it as-is.)
3951 void SortAllocationsBySizeDescecnding()
3953 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3956 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3957 void Unmap(VmaAllocator hAllocator);
3961 void* m_pMappedDataForDefragmentation;
// Order BlockInfos by block pointer; heterogeneous overload allows lookup by
// raw block pointer.
3964 struct BlockPointerLess
3966 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3968 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3970 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3972 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination preference: blocks with only movable allocations first,
// then by free size (the return statements are among the missing lines).
3978 struct BlockInfoCompareMoveDestination
3980 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3982 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3986 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3990 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3998 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3999 BlockInfoVector m_Blocks;
4001 VkResult DefragmentRound(
4002 VkDeviceSize maxBytesToMove,
4003 uint32_t maxAllocationsToMove);
4005 static bool MoveMakesSense(
4006 size_t dstBlockIndex, VkDeviceSize dstOffset,
4007 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Public interface (constructor name line missing above these parameters).
4011 VmaAllocator hAllocator,
4012 VmaBlockVector* pBlockVector,
4013 uint32_t currentFrameIndex);
4015 ~VmaDefragmentator();
4017 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4018 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4020 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
4022 VkResult Defragment(
4023 VkDeviceSize maxBytesToMove,
4024 uint32_t maxAllocationsToMove);
// VmaAllocator_T: the object behind the opaque VmaAllocator handle — owns
// per-memory-type block vectors, dedicated-allocation lists (each guarded by
// its own mutex), custom pools, and cached device properties.
// NOTE(review): lossy extraction; many member/brace lines missing. Code left
// byte-identical.
4028 struct VmaAllocator_T
4031 bool m_UseKhrDedicatedAllocation;
4033 bool m_AllocationCallbacksSpecified;
4034 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size caps, guarded by m_HeapSizeLimitMutex.
4038 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4039 VMA_MUTEX m_HeapSizeLimitMutex;
4041 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4042 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default (non-pool) block vectors, one per memory type.
4045 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4048 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4049 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4050 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only when explicitly specified, else null.
4055 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4057 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4061 return m_VulkanFunctions;
// Effective granularity: max of device limit and debug minimum (the VMA_MAX
// call's opening line is among the missing lines).
4064 VkDeviceSize GetBufferImageGranularity()
const 4067 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4068 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4071 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4072 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4074 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4076 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4077 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Query memory requirements, including dedicated-allocation preference
// (KHR_dedicated_allocation) — out params, hence non-const references.
4080 void GetBufferMemoryRequirements(
4082 VkMemoryRequirements& memReq,
4083 bool& requiresDedicatedAllocation,
4084 bool& prefersDedicatedAllocation)
const;
4085 void GetImageMemoryRequirements(
4087 VkMemoryRequirements& memReq,
4088 bool& requiresDedicatedAllocation,
4089 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory*/CreateBuffer/Image.
4092 VkResult AllocateMemory(
4093 const VkMemoryRequirements& vkMemReq,
4094 bool requiresDedicatedAllocation,
4095 bool prefersDedicatedAllocation,
4096 VkBuffer dedicatedBuffer,
4097 VkImage dedicatedImage,
4099 VmaSuballocationType suballocType,
4100 VmaAllocation* pAllocation);
4103 void FreeMemory(
const VmaAllocation allocation);
4105 void CalculateStats(
VmaStats* pStats);
4107 #if VMA_STATS_STRING_ENABLED 4108 void PrintDetailedMap(
class VmaJsonWriter& json);
4111 VkResult Defragment(
4112 VmaAllocation* pAllocations,
4113 size_t allocationCount,
4114 VkBool32* pAllocationsChanged,
4118 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4119 bool TouchAllocation(VmaAllocation hAllocation);
4122 void DestroyPool(VmaPool pool);
4123 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4125 void SetCurrentFrameIndex(uint32_t frameIndex);
4127 void MakePoolAllocationsLost(
4129 size_t* pLostAllocationCount);
4131 void CreateLostAllocation(VmaAllocation* pAllocation);
// Raw vkAllocateMemory/vkFreeMemory wrappers (apply heap size limits, invoke
// device-memory callbacks — TODO confirm, bodies not in this view).
4133 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4134 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4136 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4137 void Unmap(VmaAllocation hAllocation);
4140 VkDeviceSize m_PreferredLargeHeapBlockSize;
4142 VkPhysicalDevice m_PhysicalDevice;
4143 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
4145 VMA_MUTEX m_PoolsMutex;
4147 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4153 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation helpers for a single concrete memory type.
4155 VkResult AllocateMemoryOfType(
4156 const VkMemoryRequirements& vkMemReq,
4157 bool dedicatedAllocation,
4158 VkBuffer dedicatedBuffer,
4159 VkImage dedicatedImage,
4161 uint32_t memTypeIndex,
4162 VmaSuballocationType suballocType,
4163 VmaAllocation* pAllocation);
4166 VkResult AllocateDedicatedMemory(
4168 VmaSuballocationType suballocType,
4169 uint32_t memTypeIndex,
4171 bool isUserDataString,
4173 VkBuffer dedicatedBuffer,
4174 VkImage dedicatedImage,
4175 VmaAllocation* pAllocation);
4178 void FreeDedicatedMemory(VmaAllocation allocation);
// Allocator-aware memory helpers: forward to the CPU-side VmaMalloc/VmaFree
// overloads taking VkAllocationCallbacks, and typed new/delete-style wrappers
// on top of them. NOTE(review): lossy extraction (destructor calls inside
// vma_delete/vma_delete_array are among the missing lines); code
// byte-identical.
4184 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4186 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4189 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4191 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (caller constructs via placement-new in the
// full source — TODO confirm).
4194 template<
typename T>
4195 static T* VmaAllocate(VmaAllocator hAllocator)
4197 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation; element alignment, count * sizeof(T) bytes.
4200 template<
typename T>
4201 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4203 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4206 template<
typename T>
4207 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4212 VmaFree(hAllocator, ptr);
// Array delete: iterates elements in reverse (for destructor calls, which
// are among this extraction's missing lines), then frees the storage.
4216 template<
typename T>
4217 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4221 for(
size_t i = count; i--; )
4223 VmaFree(hAllocator, ptr);
// VmaStringBuilder: growable character buffer used to build the stats string
// (not NUL-terminated; length via GetLength()). Only compiled when
// VMA_STATS_STRING_ENABLED. NOTE(review): lossy extraction; code
// byte-identical.
4230 #if VMA_STATS_STRING_ENABLED 4232 class VmaStringBuilder
4235 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4236 size_t GetLength()
const {
return m_Data.size(); }
4237 const char* GetData()
const {
return m_Data.data(); }
4239 void Add(
char ch) { m_Data.push_back(ch); }
4240 void Add(
const char* pStr);
4241 void AddNewLine() { Add(
'\n'); }
4242 void AddNumber(uint32_t num);
4243 void AddNumber(uint64_t num);
4244 void AddPointer(
const void* ptr);
4247 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Append a C string by resizing and memcpy (no terminator stored).
4250 void VmaStringBuilder::Add(
const char* pStr)
4252 const size_t strLen = strlen(pStr);
4255 const size_t oldCount = m_Data.size();
4256 m_Data.resize(oldCount + strLen);
4257 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer appends format into a stack buffer via Vma*ToStr, then Add()
// it (the buffer declaration and trailing Add(buf) lines are among the
// extraction's missing lines).
4261 void VmaStringBuilder::AddNumber(uint32_t num)
4264 VmaUint32ToStr(buf,
sizeof(buf), num);
4268 void VmaStringBuilder::AddNumber(uint64_t num)
4271 VmaUint64ToStr(buf,
sizeof(buf), num);
4275 void VmaStringBuilder::AddPointer(
const void* ptr)
4278 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: streaming JSON emitter over a VmaStringBuilder. Maintains a
// stack of open objects/arrays; inside an object, values at even positions
// must be strings (keys) — enforced in BeginValue(). The class declaration
// header line is among this extraction's missing lines; code byte-identical.
4282 #endif // #if VMA_STATS_STRING_ENABLED 4287 #if VMA_STATS_STRING_ENABLED 4292 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4295 void BeginObject(
bool singleLine =
false);
4298 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString; Continue* append to an open string.
4301 void WriteString(
const char* pStr);
4302 void BeginString(
const char* pStr = VMA_NULL);
4303 void ContinueString(
const char* pStr);
4304 void ContinueString(uint32_t n);
4305 void ContinueString(uint64_t n);
4306 void ContinueString_Pointer(
const void* ptr);
4307 void EndString(
const char* pStr = VMA_NULL);
4309 void WriteNumber(uint32_t n);
4310 void WriteNumber(uint64_t n);
4311 void WriteBool(
bool b);
4315 static const char*
const INDENT;
4317 enum COLLECTION_TYPE
4319 COLLECTION_TYPE_OBJECT,
4320 COLLECTION_TYPE_ARRAY,
// StackItem (struct header line missing): one open collection.
4324 COLLECTION_TYPE type;
4325 uint32_t valueCount;
4326 bool singleLineMode;
4329 VmaStringBuilder& m_SB;
4330 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4331 bool m_InsideString;
4333 void BeginValue(
bool isString);
4334 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
4337 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor: wires the string builder and allocator-aware stack.
4339 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4341 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4342 m_InsideString(false)
// VmaJsonWriter member definitions. NOTE(review): lossy extraction — the
// m_SB.Add(...) emission lines ('{', '}', '[', ']', quotes, escapes) are
// largely among the missing lines; only the structural bookkeeping survives.
// Code left byte-identical.
// Destructor invariant: all strings and collections were closed.
4346 VmaJsonWriter::~VmaJsonWriter()
4348 VMA_ASSERT(!m_InsideString);
4349 VMA_ASSERT(m_Stack.empty());
// BeginObject/BeginArray push a StackItem; End* assert the matching type and
// pop (pop line missing from this view).
4352 void VmaJsonWriter::BeginObject(
bool singleLine)
4354 VMA_ASSERT(!m_InsideString);
4360 item.type = COLLECTION_TYPE_OBJECT;
4361 item.valueCount = 0;
4362 item.singleLineMode = singleLine;
4363 m_Stack.push_back(item);
4366 void VmaJsonWriter::EndObject()
4368 VMA_ASSERT(!m_InsideString);
4373 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4377 void VmaJsonWriter::BeginArray(
bool singleLine)
4379 VMA_ASSERT(!m_InsideString);
4385 item.type = COLLECTION_TYPE_ARRAY;
4386 item.valueCount = 0;
4387 item.singleLineMode = singleLine;
4388 m_Stack.push_back(item);
4391 void VmaJsonWriter::EndArray()
4393 VMA_ASSERT(!m_InsideString);
4398 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Whole string value in one call.
4402 void VmaJsonWriter::WriteString(
const char* pStr)
// Open a string value; optional initial content.
4408 void VmaJsonWriter::BeginString(
const char* pStr)
4410 VMA_ASSERT(!m_InsideString);
4414 m_InsideString =
true;
4415 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4417 ContinueString(pStr);
// Append raw characters to the open string, escaping per JSON (the per-
// character switch is among the missing lines; only the unsupported-character
// assert remains visible).
4421 void VmaJsonWriter::ContinueString(
const char* pStr)
4423 VMA_ASSERT(m_InsideString);
4425 const size_t strLen = strlen(pStr);
4426 for(
size_t i = 0; i < strLen; ++i)
4459 VMA_ASSERT(0 &&
"Character not currently supported.");
4465 void VmaJsonWriter::ContinueString(uint32_t n)
4467 VMA_ASSERT(m_InsideString);
4471 void VmaJsonWriter::ContinueString(uint64_t n)
4473 VMA_ASSERT(m_InsideString);
4477 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4479 VMA_ASSERT(m_InsideString);
4480 m_SB.AddPointer(ptr);
// Close the open string; optional final content.
4483 void VmaJsonWriter::EndString(
const char* pStr)
4485 VMA_ASSERT(m_InsideString);
4486 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4488 ContinueString(pStr);
4491 m_InsideString =
false;
4494 void VmaJsonWriter::WriteNumber(uint32_t n)
4496 VMA_ASSERT(!m_InsideString);
4501 void VmaJsonWriter::WriteNumber(uint64_t n)
4503 VMA_ASSERT(!m_InsideString);
4508 void VmaJsonWriter::WriteBool(
bool b)
4510 VMA_ASSERT(!m_InsideString);
4512 m_SB.Add(b ?
"true" :
"false");
4515 void VmaJsonWriter::WriteNull()
4517 VMA_ASSERT(!m_InsideString);
// BeginValue: emits separators/keys before each value. In an object, even
// valueCount positions must be string keys (asserted); odd positions follow
// a ':' separator; otherwise a ',' separates siblings.
4522 void VmaJsonWriter::BeginValue(
bool isString)
4524 if(!m_Stack.empty())
4526 StackItem& currItem = m_Stack.back();
4527 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4528 currItem.valueCount % 2 == 0)
4530 VMA_ASSERT(isString);
4533 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4534 currItem.valueCount % 2 != 0)
4538 else if(currItem.valueCount > 0)
4547 ++currItem.valueCount;
// WriteIndent: newline + one INDENT per open multi-line collection; oneLess
// is used when closing a collection.
4551 void VmaJsonWriter::WriteIndent(
bool oneLess)
4553 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4557 size_t count = m_Stack.size();
4558 if(count > 0 && oneLess)
4562 for(
size_t i = 0; i < count; ++i)
// SetUserData: in string mode (FLAG_USER_DATA_STRING) takes a deep copy of
// the incoming C string (freeing any previous copy); otherwise stores the raw
// pointer. NOTE(review): lossy extraction; the else-branch brace lines are
// missing. Code byte-identical.
4569 #endif // #if VMA_STATS_STRING_ENABLED 4573 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4575 if(IsUserDataString())
// Passing the currently-stored pointer back in would be a use-after-free
// once the old copy is released — hence the assert.
4577 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4579 FreeUserDataString(hAllocator);
4581 if(pUserData != VMA_NULL)
// Deep copy including the NUL terminator.
4583 const char*
const newStrSrc = (
char*)pUserData;
4584 const size_t newStrLen = strlen(newStrSrc);
4585 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4586 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4587 m_pUserData = newStrDst;
// Non-string mode: store the opaque pointer as-is (not owned).
4592 m_pUserData = pUserData;
// ChangeBlockAllocation: used by the defragmentator to move this allocation
// to another block. If persistently mapped, transfers the map reference
// count from the old block's shared mapping to the new one.
4596 void VmaAllocation_T::ChangeBlockAllocation(
4597 VmaAllocator hAllocator,
4598 VmaDeviceMemoryBlock* block,
4599 VkDeviceSize offset)
4601 VMA_ASSERT(block != VMA_NULL);
4602 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4605 if(block != m_BlockAllocation.m_Block)
4607 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
// Persistent mappings carry an implicit extra reference (increment line is
// among the missing lines in this extraction).
4608 if(IsPersistentMap())
4610 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4611 block->Map(hAllocator, mapRefCount, VMA_NULL);
4614 m_BlockAllocation.m_Block = block;
4615 m_BlockAllocation.m_Offset = offset;
// Type-dispatching accessors: switch on m_Type (BLOCK vs DEDICATED). The
// switch headers and default branches are among this extraction's missing
// lines. Code byte-identical.
// Offset within the owning block; dedicated allocations start at 0.
4618 VkDeviceSize VmaAllocation_T::GetOffset()
const 4622 case ALLOCATION_TYPE_BLOCK:
4623 return m_BlockAllocation.m_Offset;
4624 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle.
4632 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4636 case ALLOCATION_TYPE_BLOCK:
4637 return m_BlockAllocation.m_Block->m_hMemory;
4638 case ALLOCATION_TYPE_DEDICATED:
4639 return m_DedicatedAllocation.m_hMemory;
4642 return VK_NULL_HANDLE;
4646 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4650 case ALLOCATION_TYPE_BLOCK:
4651 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4652 case ALLOCATION_TYPE_DEDICATED:
4653 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Mapped CPU pointer: block allocations offset into the block's shared
// mapping; dedicated allocations use their own pointer.
4660 void* VmaAllocation_T::GetMappedData()
const 4664 case ALLOCATION_TYPE_BLOCK:
4667 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4668 VMA_ASSERT(pBlockData != VMA_NULL);
4669 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4676 case ALLOCATION_TYPE_DEDICATED:
4677 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4678 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can become lost.
4685 bool VmaAllocation_T::CanBecomeLost()
const 4689 case ALLOCATION_TYPE_BLOCK:
4690 return m_BlockAllocation.m_CanBecomeLost;
4691 case ALLOCATION_TYPE_DEDICATED:
4699 VmaPool VmaAllocation_T::GetPool()
const 4701 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4702 return m_BlockAllocation.m_hPool;
// MakeLost: CAS loop on the atomic last-use frame index. Fails if already
// lost or still in use within frameInUseCount frames of currentFrameIndex;
// retries on CAS contention (loop header among the missing lines).
4705 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4707 VMA_ASSERT(CanBecomeLost());
4713 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4716 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4721 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4727 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// FreeUserDataString: releases the owned string copy made by SetUserData.
4737 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4739 VMA_ASSERT(IsUserDataString());
4740 if(m_pUserData != VMA_NULL)
4742 char*
const oldStr = (
char*)m_pUserData;
4743 const size_t oldStrLen = strlen(oldStr);
4744 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4745 m_pUserData = VMA_NULL;
// Map/unmap reference counting. The low 7 bits of m_MapCount count user
// Map() calls (max 0x7F); the 0x80 bit marks persistent mapping and is
// excluded from the count. NOTE(review): lossy extraction — the ++/--
// m_MapCount lines are among the missing lines. Code byte-identical.
// Block-allocation map: only bumps the refcount; the actual vkMapMemory is
// done by the owning block's shared VmaDeviceMemoryMapping.
4749 void VmaAllocation_T::BlockAllocMap()
4751 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4753 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4759 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4763 void VmaAllocation_T::BlockAllocUnmap()
4765 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4767 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4773 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Dedicated map: reuses the existing mapped pointer when already mapped,
// otherwise calls vkMapMemory on the dedicated VkDeviceMemory.
4777 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4779 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4783 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4785 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4786 *ppData = m_DedicatedAllocation.m_pMappedData;
4792 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4793 return VK_ERROR_MEMORY_MAP_FAILED;
4798 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4799 hAllocator->m_hDevice,
4800 m_DedicatedAllocation.m_hMemory,
4805 if(result == VK_SUCCESS)
4807 m_DedicatedAllocation.m_pMappedData = *ppData;
// Dedicated unmap: on last reference, clears the cached pointer and calls
// vkUnmapMemory.
4814 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4816 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4818 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4823 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4824 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4825 hAllocator->m_hDevice,
4826 m_DedicatedAllocation.m_hMemory);
4831 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Stats-string helpers (VMA_STATS_STRING_ENABLED only).
// VmaPrintStatInfo: serializes one VmaStatInfo as a JSON object; the
// WriteNumber(stat.*) calls paired with each key are among this extraction's
// missing lines. Code byte-identical.
4835 #if VMA_STATS_STRING_ENABLED 4838 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4847 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4851 json.WriteString(
"Blocks");
4854 json.WriteString(
"Allocations");
4857 json.WriteString(
"UnusedRanges");
4860 json.WriteString(
"UsedBytes");
4863 json.WriteString(
"UnusedBytes");
// Min/Avg/Max of allocation sizes, as a single-line nested object.
4868 json.WriteString(
"AllocationSize");
4869 json.BeginObject(
true);
4870 json.WriteString(
"Min");
4872 json.WriteString(
"Avg");
4874 json.WriteString(
"Max");
// Min/Avg/Max of unused range sizes, same layout.
4881 json.WriteString(
"UnusedRangeSize");
4882 json.BeginObject(
true);
4883 json.WriteString(
"Min");
4885 json.WriteString(
"Avg");
4887 json.WriteString(
"Max");
// VmaSuballocationItemSizeLess: orders suballocation-list iterators by the
// pointed-to region size; heterogeneous overload enables binary search by a
// bare VkDeviceSize (used on m_FreeSuballocationsBySize).
4895 #endif // #if VMA_STATS_STRING_ENABLED 4897 struct VmaSuballocationItemSizeLess
4900 const VmaSuballocationList::iterator lhs,
4901 const VmaSuballocationList::iterator rhs)
const 4903 return lhs->size < rhs->size;
4906 const VmaSuballocationList::iterator lhs,
4907 VkDeviceSize rhsSize)
const 4909 return lhs->size < rhsSize;
4916 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4920 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4921 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4925 VmaBlockMetadata::~VmaBlockMetadata()
4929 void VmaBlockMetadata::Init(VkDeviceSize size)
4933 m_SumFreeSize = size;
4935 VmaSuballocation suballoc = {};
4936 suballoc.offset = 0;
4937 suballoc.size = size;
4938 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4939 suballoc.hAllocation = VK_NULL_HANDLE;
4941 m_Suballocations.push_back(suballoc);
4942 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4944 m_FreeSuballocationsBySize.push_back(suballocItem);
4947 bool VmaBlockMetadata::Validate()
const 4949 if(m_Suballocations.empty())
4955 VkDeviceSize calculatedOffset = 0;
4957 uint32_t calculatedFreeCount = 0;
4959 VkDeviceSize calculatedSumFreeSize = 0;
4962 size_t freeSuballocationsToRegister = 0;
4964 bool prevFree =
false;
4966 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4967 suballocItem != m_Suballocations.cend();
4970 const VmaSuballocation& subAlloc = *suballocItem;
4973 if(subAlloc.offset != calculatedOffset)
4978 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4980 if(prevFree && currFree)
4985 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4992 calculatedSumFreeSize += subAlloc.size;
4993 ++calculatedFreeCount;
4994 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4996 ++freeSuballocationsToRegister;
5001 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5005 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5011 calculatedOffset += subAlloc.size;
5012 prevFree = currFree;
5017 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5022 VkDeviceSize lastSize = 0;
5023 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5025 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5028 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5033 if(suballocItem->size < lastSize)
5038 lastSize = suballocItem->size;
5042 if(!ValidateFreeSuballocationList() ||
5043 (calculatedOffset != m_Size) ||
5044 (calculatedSumFreeSize != m_SumFreeSize) ||
5045 (calculatedFreeCount != m_FreeCount))
// Returns the size of the largest free range. m_FreeSuballocationsBySize is
// sorted ascending by size, so the largest registered free range is at back().
// NOTE(review): the `else return 0;` path for an empty vector is missing from
// this extraction — confirm against upstream.
5053 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5055 if(!m_FreeSuballocationsBySize.empty())
5057 return m_FreeSuballocationsBySize.back()->size;
// A block is empty when it contains exactly one suballocation and that
// suballocation is free (i.e. one free range spanning the whole block).
5065 bool VmaBlockMetadata::IsEmpty()
const 5067 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills outInfo with per-block allocation statistics by walking all
// suballocations and classifying each as used or free.
// NOTE(review): the accumulation statements inside the loop (and the
// initialization of outInfo) are missing from this extraction.
5070 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5074 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5086 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5087 suballocItem != m_Suballocations.cend();
5090 const VmaSuballocation& suballoc = *suballocItem;
// Used range (non-free); free ranges presumably handled in the dropped else-branch.
5091 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's statistics into inoutStats (used by
// VmaBlockVector::GetPoolStats). Adds the block's total size to
// inoutStats.size; remaining accumulations are missing from this extraction.
5104 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5106 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5108 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON: totals, counts, then one object
// per suballocation (type, size, offset, optional user data). Only compiled
// when VMA_STATS_STRING_ENABLED.
// NOTE(review): BeginObject/EndObject/EndArray calls and some loop framing are
// missing from this extraction.
5115 #if VMA_STATS_STRING_ENABLED 5117 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5121 json.WriteString(
"TotalBytes");
5122 json.WriteNumber(m_Size);
5124 json.WriteString(
"UnusedBytes");
5125 json.WriteNumber(m_SumFreeSize);
5127 json.WriteString(
"Allocations");
// Used-range count = total suballocations minus free ranges.
5128 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5130 json.WriteString(
"UnusedRanges");
5131 json.WriteNumber(m_FreeCount);
5133 json.WriteString(
"Suballocations");
5136 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5137 suballocItem != m_Suballocations.cend();
5138 ++suballocItem, ++i)
5140 json.BeginObject(
true);
5142 json.WriteString(
"Type");
5143 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5145 json.WriteString(
"Size");
5146 json.WriteNumber(suballocItem->size);
5148 json.WriteString(
"Offset");
5149 json.WriteNumber(suballocItem->offset);
// Used ranges additionally report the allocation's user data, either as a
// string or as a pointer value.
5151 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5153 const void* pUserData = suballocItem->hAllocation->GetUserData();
5154 if(pUserData != VMA_NULL)
5156 json.WriteString(
"UserData");
5157 if(suballocItem->hAllocation->IsUserDataString())
5159 json.WriteString((
const char*)pUserData);
5164 json.ContinueString_Pointer(pUserData);
// Builds an allocation request covering the start of an empty block: the block
// must be empty (asserted), the request starts at offset 0, uses the first
// (whole-block free) suballocation, and makes nothing lost.
5177 #endif // #if VMA_STATS_STRING_ENABLED 5189 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5191 VMA_ASSERT(IsEmpty());
5192 pAllocationRequest->offset = 0;
5193 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5194 pAllocationRequest->sumItemSize = 0;
5195 pAllocationRequest->item = m_Suballocations.begin();
5196 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find space for an allocation of allocSize/allocAlignment/allocType
// in this block. Two strategies:
//  1) Search registered free ranges (best-fit via binary search when
//     VMA_BEST_FIT, otherwise worst-fit scanning from the largest).
//  2) If canMakeOtherLost, additionally consider evicting ("losing")
//     existing lost-able allocations, picking the cheapest candidate by
//     VmaAllocationRequest::CalcCost().
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): several `return true/false;` lines, braces, and some
// CheckAllocation arguments are missing from this extraction.
5199 bool VmaBlockMetadata::CreateAllocationRequest(
5200 uint32_t currentFrameIndex,
5201 uint32_t frameInUseCount,
5202 VkDeviceSize bufferImageGranularity,
5203 VkDeviceSize allocSize,
5204 VkDeviceSize allocAlignment,
5205 VmaSuballocationType allocType,
5206 bool canMakeOtherLost,
5207 VmaAllocationRequest* pAllocationRequest)
5209 VMA_ASSERT(allocSize > 0);
5210 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5211 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5212 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must at least cover the request.
5215 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5221 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5222 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted vector for the first free
// range not smaller than the request, then scan upward until one fits.
5227 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5228 m_FreeSuballocationsBySize.data(),
5229 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5231 VmaSuballocationItemSizeLess());
5232 size_t index = it - m_FreeSuballocationsBySize.data();
5233 for(; index < freeSuballocCount; ++index)
5238 bufferImageGranularity,
5242 m_FreeSuballocationsBySize[index],
5244 &pAllocationRequest->offset,
5245 &pAllocationRequest->itemsToMakeLostCount,
5246 &pAllocationRequest->sumFreeSize,
5247 &pAllocationRequest->sumItemSize))
5249 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit path: iterate free ranges from largest to smallest.
5257 for(
size_t index = freeSuballocCount; index--; )
5262 bufferImageGranularity,
5266 m_FreeSuballocationsBySize[index],
5268 &pAllocationRequest->offset,
5269 &pAllocationRequest->itemsToMakeLostCount,
5270 &pAllocationRequest->sumFreeSize,
5271 &pAllocationRequest->sumItemSize))
5273 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force every suballocation that is free or can become
// lost, keeping the candidate with the lowest cost.
5280 if(canMakeOtherLost)
// Sentinel "worst possible" cost so the first viable candidate wins.
5284 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5285 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5287 VmaAllocationRequest tmpAllocRequest = {};
5288 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5289 suballocIt != m_Suballocations.end();
5292 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5293 suballocIt->hAllocation->CanBecomeLost())
5298 bufferImageGranularity,
5304 &tmpAllocRequest.offset,
5305 &tmpAllocRequest.itemsToMakeLostCount,
5306 &tmpAllocRequest.sumFreeSize,
5307 &tmpAllocRequest.sumItemSize))
5309 tmpAllocRequest.item = suballocIt;
5311 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5313 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate replaced the VK_WHOLE_SIZE sentinel.
5319 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Actually evicts the allocations that CreateAllocationRequest counted in
// itemsToMakeLostCount: walks forward from the request's item, skipping free
// ranges, and calls MakeLost on each lost-able allocation. FreeSuballocation
// merges the freed range, so the iterator is replaced with its result.
// NOTE(review): the failure branch (MakeLost returning false → return false)
// and the final `return true;` are missing from this extraction.
5328 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5329 uint32_t currentFrameIndex,
5330 uint32_t frameInUseCount,
5331 VmaAllocationRequest* pAllocationRequest)
5333 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Free ranges don't need losing — step over them.
5335 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5337 ++pAllocationRequest->item;
5339 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5340 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5341 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5342 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the merged range.
5344 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5345 --pAllocationRequest->itemsToMakeLostCount;
5353 VMA_HEAVY_ASSERT(Validate());
5354 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5355 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Evicts every allocation in this block that can become lost and whose last
// use is older than frameInUseCount frames. Returns how many were lost.
// FreeSuballocation returns a valid iterator after merging, which replaces
// the loop iterator.
5360 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5362 uint32_t lostAllocationCount = 0;
5363 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5364 it != m_Suballocations.end();
5367 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5368 it->hAllocation->CanBecomeLost() &&
5369 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5371 it = FreeSuballocation(it);
5372 ++lostAllocationCount;
5375 return lostAllocationCount;
// Commits a previously computed allocation request: converts the free range
// at request.item into a used suballocation of allocSize, and splits off any
// leading/trailing padding back into new free suballocations.
// NOTE(review): some increment/adjustment lines for m_FreeCount around the
// padding cases appear to be missing from this extraction.
5378 void VmaBlockMetadata::Alloc(
5379 const VmaAllocationRequest& request,
5380 VmaSuballocationType type,
5381 VkDeviceSize allocSize,
5382 VmaAllocation hAllocation)
5384 VMA_ASSERT(request.item != m_Suballocations.end());
5385 VmaSuballocation& suballoc = *request.item;
5387 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5389 VMA_ASSERT(request.offset >= suballoc.offset);
// Space between the free range's start and the aligned allocation offset.
5390 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5391 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// Space left in the free range after the allocation.
5392 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range leaves the free list before being rewritten as a used range.
5396 UnregisterFreeSuballocation(request.item);
5398 suballoc.offset = request.offset;
5399 suballoc.size = allocSize;
5400 suballoc.type = type;
5401 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after the item.
5406 VmaSuballocation paddingSuballoc = {};
5407 paddingSuballoc.offset = request.offset + allocSize;
5408 paddingSuballoc.size = paddingEnd;
5409 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5410 VmaSuballocationList::iterator next = request.item;
5412 const VmaSuballocationList::iterator paddingEndItem =
5413 m_Suballocations.insert(next, paddingSuballoc);
5414 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before the item.
5420 VmaSuballocation paddingSuballoc = {};
5421 paddingSuballoc.offset = request.offset - paddingBegin;
5422 paddingSuballoc.size = paddingBegin;
5423 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5424 const VmaSuballocationList::iterator paddingBeginItem =
5425 m_Suballocations.insert(request.item, paddingSuballoc);
5426 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; padding ranges re-add to the count (the
// paddingBegin/paddingEnd increments are in the dropped lines).
5430 m_FreeCount = m_FreeCount - 1;
5431 if(paddingBegin > 0)
5439 m_SumFreeSize -= allocSize;
// Frees the suballocation backing the given allocation: linear search by
// hAllocation handle, then FreeSuballocation (which merges neighbors).
// Asserts if the allocation is not found in this block.
5442 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5444 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5445 suballocItem != m_Suballocations.end();
5448 VmaSuballocation& suballoc = *suballocItem;
5449 if(suballoc.hAllocation == allocation)
5451 FreeSuballocation(suballocItem);
5452 VMA_HEAVY_ASSERT(Validate());
5456 VMA_ASSERT(0 &&
"Not found!");
// Same as Free() but identifies the suballocation by its byte offset inside
// the block rather than by allocation handle. Asserts if no match.
5459 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5461 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5462 suballocItem != m_Suballocations.end();
5465 VmaSuballocation& suballoc = *suballocItem;
5466 if(suballoc.offset == offset)
5468 FreeSuballocation(suballocItem);
5472 VMA_ASSERT(0 &&
"Not found!");
// Checks invariants of m_FreeSuballocationsBySize: every entry references a
// FREE suballocation, is at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER,
// and the vector is sorted by size ascending.
// NOTE(review): the `return false;` bodies and final `return true;` are
// missing from this extraction.
5475 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5477 VkDeviceSize lastSize = 0;
5478 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5480 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5482 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5487 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5492 if(it->size < lastSize)
5498 lastSize = it->size;
// Core fit test: given a starting suballocation, determines whether an
// allocation of allocSize/allocAlignment/allocType can be placed there.
// Computes the final aligned *pOffset, honoring VMA_DEBUG_MARGIN and
// bufferImageGranularity (Vulkan's linear-vs-optimal resource page conflict
// rule), and — in the canMakeOtherLost path — how many subsequent
// allocations would need to be made lost (*itemsToMakeLostCount) plus the
// free/used byte sums used for cost comparison.
// Two largely parallel branches: the first handles canMakeOtherLost == true
// (may span multiple suballocations), the second the simple free-range case.
// NOTE(review): many `return false;`/`return true;` lines, braces, and loop
// increments are missing from this extraction — verify against upstream.
5503 bool VmaBlockMetadata::CheckAllocation(
5504 uint32_t currentFrameIndex,
5505 uint32_t frameInUseCount,
5506 VkDeviceSize bufferImageGranularity,
5507 VkDeviceSize allocSize,
5508 VkDeviceSize allocAlignment,
5509 VmaSuballocationType allocType,
5510 VmaSuballocationList::const_iterator suballocItem,
5511 bool canMakeOtherLost,
5512 VkDeviceSize* pOffset,
5513 size_t* itemsToMakeLostCount,
5514 VkDeviceSize* pSumFreeSize,
5515 VkDeviceSize* pSumItemSize)
const 5517 VMA_ASSERT(allocSize > 0);
5518 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5519 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5520 VMA_ASSERT(pOffset != VMA_NULL);
5522 *itemsToMakeLostCount = 0;
// ---- Branch 1: placement may consume lost-able allocations. ----
5526 if(canMakeOtherLost)
5528 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5530 *pSumFreeSize = suballocItem->size;
// Starting on a used range: it must itself be lost-able and stale.
5534 if(suballocItem->hAllocation->CanBecomeLost() &&
5535 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5537 ++*itemsToMakeLostCount;
5538 *pSumItemSize = suballocItem->size;
// Remainder of the block must be able to hold the allocation at all.
5547 if(m_Size - suballocItem->offset < allocSize)
5553 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
5556 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5558 *pOffset += VMA_DEBUG_MARGIN;
// Apply the stricter of the requested and debug alignments.
5562 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5563 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a previous suballocation of conflicting type shares the same
// bufferImageGranularity page, push the offset to the next page.
5567 if(bufferImageGranularity > 1)
5569 bool bufferImageGranularityConflict =
false;
5570 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5571 while(prevSuballocItem != m_Suballocations.cbegin())
5574 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5575 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5577 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5579 bufferImageGranularityConflict =
true;
5587 if(bufferImageGranularityConflict)
5589 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed the offset past the end of the starting range → fail.
5595 if(*pOffset >= suballocItem->offset + suballocItem->size)
5601 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// End margin required unless the allocation reaches the end of the block.
5604 VmaSuballocationList::const_iterator next = suballocItem;
5606 const VkDeviceSize requiredEndMargin =
5607 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5609 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5611 if(suballocItem->offset + totalSize > m_Size)
// The allocation may span several suballocations: walk forward accumulating
// free bytes and lost-able used bytes until the required size is covered.
5618 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5619 if(totalSize > suballocItem->size)
5621 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5622 while(remainingSize > 0)
5625 if(lastSuballocItem == m_Suballocations.cend())
5629 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5631 *pSumFreeSize += lastSuballocItem->size;
5635 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5636 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5637 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5639 ++*itemsToMakeLostCount;
5640 *pSumItemSize += lastSuballocItem->size;
5647 remainingSize = (lastSuballocItem->size < remainingSize) ?
5648 remainingSize - lastSuballocItem->size : 0;
// Check following allocations on the same granularity page: a conflicting
// type after us may also have to be made lost.
5654 if(bufferImageGranularity > 1)
5656 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5658 while(nextSuballocItem != m_Suballocations.cend())
5660 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5661 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5663 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5665 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5666 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5667 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5669 ++*itemsToMakeLostCount;
// ---- Branch 2: simple case — must fit entirely in one free range. ----
5688 const VmaSuballocation& suballoc = *suballocItem;
5689 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5691 *pSumFreeSize = suballoc.size;
5694 if(suballoc.size < allocSize)
5700 *pOffset = suballoc.offset;
5703 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5705 *pOffset += VMA_DEBUG_MARGIN;
5709 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5710 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in branch 1.
5714 if(bufferImageGranularity > 1)
5716 bool bufferImageGranularityConflict =
false;
5717 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5718 while(prevSuballocItem != m_Suballocations.cbegin())
5721 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5722 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5724 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5726 bufferImageGranularityConflict =
true;
5734 if(bufferImageGranularityConflict)
5736 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5741 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5744 VmaSuballocationList::const_iterator next = suballocItem;
5746 const VkDeviceSize requiredEndMargin =
5747 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed the free range.
5750 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity-conflict scan: here any conflict is a hard failure
// (no eviction allowed in this branch).
5757 if(bufferImageGranularity > 1)
5759 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5761 while(nextSuballocItem != m_Suballocations.cend())
5763 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5764 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5766 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a free suballocation with the free suballocation that follows it:
// the item absorbs the next item's size and the next item is erased.
// Both items must be FREE (asserted). The `++nextItem` between 5790 and 5792
// is among the lines dropped by this extraction.
5785 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5787 VMA_ASSERT(item != m_Suballocations.end());
5788 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5790 VmaSuballocationList::iterator nextItem = item;
5792 VMA_ASSERT(nextItem != m_Suballocations.end());
5793 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5795 item->size += nextItem->size;
5797 m_Suballocations.erase(nextItem);
// Turns a used suballocation into a free one, merges it with adjacent free
// neighbors, and (re)registers the resulting range in the size-sorted free
// list. Returns an iterator to the final merged free suballocation.
// NOTE(review): iterator increments/decrements and the merged-with-prev
// return path are missing from this extraction.
5800 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5803 VmaSuballocation& suballoc = *suballocItem;
5804 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5805 suballoc.hAllocation = VK_NULL_HANDLE;
// Freed bytes return to the cached free-size total (m_FreeCount increment
// is among the dropped lines).
5809 m_SumFreeSize += suballoc.size;
// Decide whether the new free range touches free neighbors.
5812 bool mergeWithNext =
false;
5813 bool mergeWithPrev =
false;
5815 VmaSuballocationList::iterator nextItem = suballocItem;
5817 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5819 mergeWithNext =
true;
5822 VmaSuballocationList::iterator prevItem = suballocItem;
5823 if(suballocItem != m_Suballocations.begin())
5826 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5828 mergeWithPrev =
true;
// Merging removes the absorbed range from the free list first.
5834 UnregisterFreeSuballocation(nextItem);
5835 MergeFreeWithNext(suballocItem);
5840 UnregisterFreeSuballocation(prevItem);
5841 MergeFreeWithNext(prevItem);
5842 RegisterFreeSuballocation(prevItem);
5847 RegisterFreeSuballocation(suballocItem);
5848 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping it
// sorted by size. Ranges smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
5852 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5854 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5855 VMA_ASSERT(item->size > 0);
5859 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5861 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5863 if(m_FreeSuballocationsBySize.empty())
5865 m_FreeSuballocationsBySize.push_back(item);
// Non-empty vector: binary-search insertion keeps size ordering.
5869 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Binary-search
// locates the first entry of equal size, then a linear scan over the run of
// equal-size entries finds the exact iterator. Asserts if not found.
// Ranges below the registration threshold were never inserted, so there is
// nothing to remove for them.
5877 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5879 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5880 VMA_ASSERT(item->size > 0);
5884 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5886 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5888 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5889 m_FreeSuballocationsBySize.data(),
5890 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5892 VmaSuballocationItemSizeLess());
5893 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5894 index < m_FreeSuballocationsBySize.size();
5897 if(m_FreeSuballocationsBySize[index] == item)
5899 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still scanning: entry must have the same size, otherwise we walked past
// the run of candidates without finding the item.
5902 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5904 VMA_ASSERT(0 &&
"Not found.");
// VmaDeviceMemoryMapping: reference-counted CPU mapping of a VkDeviceMemory.
// Constructor starts unmapped; destructor asserts the map count reached zero
// (i.e. no outstanding vmaMapMemory without matching vmaUnmapMemory).
5913 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5915 m_pMappedData(VMA_NULL)
5919 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5921 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Maps hMemory for CPU access with reference counting, under m_Mutex (taken
// only when the allocator was created multithread-safe). If already mapped,
// just bumps the count and returns the cached pointer; otherwise performs the
// actual vkMapMemory through the allocator's function pointers.
// NOTE(review): the count==0 guard, vkMapMemory's offset/size/flags arguments,
// and the m_MapCount/m_pMappedData assignments on success are missing from
// this extraction.
5924 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
5931 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: share the existing mapping.
5934 m_MapCount += count;
5935 VMA_ASSERT(m_pMappedData != VMA_NULL);
5936 if(ppData != VMA_NULL)
5938 *ppData = m_pMappedData;
// First mapping: call into Vulkan.
5944 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5945 hAllocator->m_hDevice,
5951 if(result == VK_SUCCESS)
5953 if(ppData != VMA_NULL)
5955 *ppData = m_pMappedData;
// Decrements the mapping reference count under the mutex; when it reaches
// zero, clears the cached pointer and calls vkUnmapMemory. Unbalanced unmap
// (count underflow) triggers the assertion instead.
5963 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
5970 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5971 if(m_MapCount >= count)
5973 m_MapCount -= count;
// Count reached zero (guard in a dropped line): release the Vulkan mapping.
5976 m_pMappedData = VMA_NULL;
5977 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5982 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// VmaDeviceMemoryBlock: one VkDeviceMemory chunk plus its suballocation
// metadata. The constructor leaves the block uninitialized (no memory handle);
// Init() binds the freshly allocated VkDeviceMemory and sets up the metadata
// to one whole-block free range.
5989 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5990 m_MemoryTypeIndex(UINT32_MAX),
5991 m_hMemory(VK_NULL_HANDLE),
5992 m_Metadata(hAllocator)
5996 void VmaDeviceMemoryBlock::Init(
5997 uint32_t newMemoryTypeIndex,
5998 VkDeviceMemory newMemory,
5999 VkDeviceSize newSize)
// Init must only run once per block.
6001 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6003 m_MemoryTypeIndex = newMemoryTypeIndex;
6004 m_hMemory = newMemory;
6006 m_Metadata.Init(newSize);
// Destroy(): releases the underlying VkDeviceMemory back through the
// allocator. The block must contain no live allocations (asserted via
// IsEmpty). Validate(): a block is valid when it owns memory of nonzero size
// and its metadata passes its own validation.
6009 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
6013 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6015 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6016 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6017 m_hMemory = VK_NULL_HANDLE;
6020 bool VmaDeviceMemoryBlock::Validate()
const 6022 if((m_hMemory == VK_NULL_HANDLE) ||
6023 (m_Metadata.GetSize() == 0))
6028 return m_Metadata.Validate();
// Thin wrappers forwarding map/unmap to the block's reference-counted
// VmaDeviceMemoryMapping with this block's VkDeviceMemory handle.
6031 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
6033 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
6036 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
6038 m_Mapping.Unmap(hAllocator, m_hMemory, count);
// Fragments of three definitions, heavily truncated by this extraction:
//  - a stat-info initializer that zeroes a VmaStatInfo struct,
//  - VmaPostprocessCalcStatInfo (body not visible),
//  - VmaPool_T constructor/destructor: the ctor forwards the
//    VmaPoolCreateInfo fields into the pool's internal VmaBlockVector.
// NOTE(review): signatures and bodies here are mostly missing — consult
// upstream vk_mem_alloc.h before editing.
6043 memset(&outInfo, 0,
sizeof(outInfo));
6062 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
6070 VmaPool_T::VmaPool_T(
6071 VmaAllocator hAllocator,
6075 createInfo.memoryTypeIndex,
6076 createInfo.blockSize,
6077 createInfo.minBlockCount,
6078 createInfo.maxBlockCount,
6080 createInfo.frameInUseCount,
6085 VmaPool_T::~VmaPool_T()
// VmaBlockVector: a growable sequence of VmaDeviceMemoryBlock for one memory
// type, used both for default pools and custom VmaPool objects
// (isCustomPool). The constructor only stores configuration; no memory is
// allocated until CreateMinBlocks()/Allocate().
6089 #if VMA_STATS_STRING_ENABLED 6091 #endif // #if VMA_STATS_STRING_ENABLED 6093 VmaBlockVector::VmaBlockVector(
6094 VmaAllocator hAllocator,
6095 uint32_t memoryTypeIndex,
6096 VkDeviceSize preferredBlockSize,
6097 size_t minBlockCount,
6098 size_t maxBlockCount,
6099 VkDeviceSize bufferImageGranularity,
6100 uint32_t frameInUseCount,
6101 bool isCustomPool) :
6102 m_hAllocator(hAllocator),
6103 m_MemoryTypeIndex(memoryTypeIndex),
6104 m_PreferredBlockSize(preferredBlockSize),
6105 m_MinBlockCount(minBlockCount),
6106 m_MaxBlockCount(maxBlockCount),
6107 m_BufferImageGranularity(bufferImageGranularity),
6108 m_FrameInUseCount(frameInUseCount),
6109 m_IsCustomPool(isCustomPool),
// Block pointers live in a vector using the allocator's own callbacks.
6110 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6111 m_HasEmptyBlock(false),
6112 m_pDefragmentator(VMA_NULL)
// Destructor: any defragmentator must have been destroyed first (asserted),
// then every remaining block's device memory is freed and the block object
// deleted, iterating in reverse.
6116 VmaBlockVector::~VmaBlockVector()
6118 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6120 for(
size_t i = m_Blocks.size(); i--; )
6122 m_Blocks[i]->Destroy(m_hAllocator);
6123 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure. NOTE(review): the `return res;` / final `return VK_SUCCESS;`
// lines are missing from this extraction.
6127 VkResult VmaBlockVector::CreateMinBlocks()
6129 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6131 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6132 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks under the vector's mutex,
// delegating per-block accounting to VmaBlockMetadata::AddPoolStats.
// (pStats initialization lines are not visible in this extraction.)
6140 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6148 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6150 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6152 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6154 VMA_HEAVY_ASSERT(pBlock->Validate());
6155 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retries of the make-lost allocation loop in Allocate().
6159 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector, under the mutex, in three stages:
//  1) Try every existing block (CreateAllocationRequest with
//     canMakeOtherLost=false).
//  2) If allowed, create a new block — for default pools the size is
//     heuristically halved up to NEW_BLOCK_SIZE_SHIFT_MAX times, and halved
//     again on VK_ERROR_OUT_OF_DEVICE_MEMORY — and allocate from it.
//  3) If the caller allows making other allocations lost, repeatedly pick
//     the cheapest eviction candidate across all blocks (up to
//     VMA_ALLOCATION_TRY_COUNT attempts).
// Returns VK_SUCCESS, VK_ERROR_OUT_OF_DEVICE_MEMORY, or
// VK_ERROR_TOO_MANY_OBJECTS when eviction keeps failing.
// NOTE(review): many lines are dropped in this extraction (createInfo/
// mapped/canMakeOtherLost flag setup, several CreateAllocationRequest and
// InitBlockAllocation arguments, `return VK_SUCCESS;` lines) — verify
// against upstream before editing logic.
6161 VkResult VmaBlockVector::Allocate(
6162 VmaPool hCurrentPool,
6163 uint32_t currentFrameIndex,
6164 const VkMemoryRequirements& vkMemReq,
6166 VmaSuballocationType suballocType,
6167 VmaAllocation* pAllocation)
6172 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Stage 1: search existing blocks (no evictions). ----
6176 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6178 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6179 VMA_ASSERT(pCurrBlock);
6180 VmaAllocationRequest currRequest = {};
6181 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6184 m_BufferImageGranularity,
6192 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map refcount up front.
6196 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6197 if(res != VK_SUCCESS)
// Block is about to hold an allocation, so it is no longer the empty one.
6204 if(pCurrBlock->m_Metadata.IsEmpty())
6206 m_HasEmptyBlock =
false;
6209 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6210 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6211 (*pAllocation)->InitBlockAllocation(
6220 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6221 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6222 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 2: create a new block if the pool still has headroom. ----
6227 const bool canCreateNewBlock =
6229 (m_Blocks.size() < m_MaxBlockCount);
6232 if(canCreateNewBlock)
6235 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6236 uint32_t newBlockSizeShift = 0;
6237 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start small: shrink the preferred size while it is still
// larger than any existing block and at least twice the request.
6241 if(m_IsCustomPool ==
false)
6244 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6245 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6247 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6248 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6250 newBlockSize = smallerNewBlockSize;
6251 ++newBlockSizeShift;
6260 size_t newBlockIndex = 0;
6261 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved block sizes
// (still large enough for the request).
6263 if(m_IsCustomPool ==
false)
6265 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6267 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6268 if(smallerNewBlockSize >= vkMemReq.size)
6270 newBlockSize = smallerNewBlockSize;
6271 ++newBlockSizeShift;
6272 res = CreateBlock(newBlockSize, &newBlockIndex);
6281 if(res == VK_SUCCESS)
6283 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6284 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6288 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6289 if(res != VK_SUCCESS)
// Fresh block: the whole-block request always succeeds.
6296 VmaAllocationRequest allocRequest;
6297 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6298 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6299 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6300 (*pAllocation)->InitBlockAllocation(
6303 allocRequest.offset,
6309 VMA_HEAVY_ASSERT(pBlock->Validate());
6310 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6311 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 3: evict lost-able allocations, cheapest candidate first. ----
6319 if(canMakeOtherLost)
6321 uint32_t tryIndex = 0;
6322 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6324 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6325 VmaAllocationRequest bestRequest = {};
6326 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6330 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6332 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6333 VMA_ASSERT(pCurrBlock);
6334 VmaAllocationRequest currRequest = {};
6335 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6338 m_BufferImageGranularity,
6345 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6346 if(pBestRequestBlock == VMA_NULL ||
6347 currRequestCost < bestRequestCost)
6349 pBestRequestBlock = pCurrBlock;
6350 bestRequest = currRequest;
6351 bestRequestCost = currRequestCost;
// Cost 0 = nothing needs to be lost; cannot do better, stop searching.
6353 if(bestRequestCost == 0)
6361 if(pBestRequestBlock != VMA_NULL)
6365 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6366 if(res != VK_SUCCESS)
// Eviction may fail (allocation became in-use again) → retry outer loop.
6372 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6378 if(pBestRequestBlock->m_Metadata.IsEmpty())
6380 m_HasEmptyBlock =
false;
6383 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6384 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6385 (*pAllocation)->InitBlockAllocation(
6394 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6395 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6396 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Gave up after too many failed eviction rounds.
6410 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6412 return VK_ERROR_TOO_MANY_OBJECTS;
6416 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back into its block. Under the mutex: drops the
// persistent-map reference if any, frees the suballocation, then applies the
// "keep at most one empty block" policy — the newly emptied block is deleted
// if an empty one already exists (and the pool is above m_MinBlockCount),
// otherwise it is retained; conversely a retained empty block at the back is
// deleted when another block becomes the empty one. The actual VkDeviceMemory
// release happens after the lock is dropped.
6419 void VmaBlockVector::Free(
6420 VmaAllocation hAllocation)
6422 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope lock so destruction happens outside the critical section.
6426 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6428 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6430 if(hAllocation->IsPersistentMap())
6432 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6435 pBlock->m_Metadata.Free(hAllocation);
6436 VMA_HEAVY_ASSERT(pBlock->Validate());
6438 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6441 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block → this one is redundant, schedule deletion.
6444 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6446 pBlockToDelete = pBlock;
// Otherwise keep this block as the single cached empty block.
6452 m_HasEmptyBlock =
true;
// Block not empty, but an empty block may now be deletable from the back.
6457 else if(m_HasEmptyBlock)
6459 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6460 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6462 pBlockToDelete = pLastBlock;
6463 m_Blocks.pop_back();
6464 m_HasEmptyBlock =
false;
6468 IncrementallySortBlocks();
// Deletion outside the lock: frees VkDeviceMemory without blocking others.
6473 if(pBlockToDelete != VMA_NULL)
6475 VMA_DEBUG_LOG(
" Deleted empty allocation");
6476 pBlockToDelete->Destroy(m_hAllocator);
6477 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning from the back and
// stopping early once a block at least as large as the preferred size is
// seen. NOTE(review): the `result` declaration, `break`, and `return result;`
// lines are missing from this extraction.
6481 size_t VmaBlockVector::CalcMaxBlockSize()
const 6484 for(
size_t i = m_Blocks.size(); i--; )
6486 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6487 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (linear search; the block
// itself is not destroyed here).
6495 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6497 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6499 if(m_Blocks[blockIndex] == pBlock)
6501 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free space, so that
// allocation tries the fullest blocks first. Called on every Free; full
// ordering converges over repeated calls.
6508 void VmaBlockVector::IncrementallySortBlocks()
6511 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6513 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6515 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index. NOTE(review): the error-return after
// AllocateVulkanMemory and pBlock->Init's first arguments are missing from
// this extraction.
6521 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6523 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6524 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6525 allocInfo.allocationSize = blockSize;
6526 VkDeviceMemory mem = VK_NULL_HANDLE;
6527 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6536 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6540 allocInfo.allocationSize);
6542 m_Blocks.push_back(pBlock);
6543 if(pNewBlockIndex != VMA_NULL)
6545 *pNewBlockIndex = m_Blocks.size() - 1;
// JSON dump of this block vector, under the mutex. Custom pools report
// MemoryTypeIndex/BlockSize/BlockCount (+FrameInUseCount); default pools
// report only PreferredBlockSize. Then each block's detailed map is emitted.
// Only compiled when VMA_STATS_STRING_ENABLED.
// NOTE(review): the m_IsCustomPool branch framing and Begin/End object/array
// calls are missing from this extraction.
6551 #if VMA_STATS_STRING_ENABLED 6553 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6555 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: full configuration.
6561 json.WriteString(
"MemoryTypeIndex");
6562 json.WriteNumber(m_MemoryTypeIndex);
6564 json.WriteString(
"BlockSize");
6565 json.WriteNumber(m_PreferredBlockSize);
6567 json.WriteString(
"BlockCount");
6568 json.BeginObject(
true);
6569 if(m_MinBlockCount > 0)
6571 json.WriteString(
"Min");
6572 json.WriteNumber((uint64_t)m_MinBlockCount);
6574 if(m_MaxBlockCount < SIZE_MAX)
6576 json.WriteString(
"Max");
6577 json.WriteNumber((uint64_t)m_MaxBlockCount);
6579 json.WriteString(
"Cur");
6580 json.WriteNumber((uint64_t)m_Blocks.size());
6583 if(m_FrameInUseCount > 0)
6585 json.WriteString(
"FrameInUseCount");
6586 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size.
6591 json.WriteString(
"PreferredBlockSize");
6592 json.WriteNumber(m_PreferredBlockSize);
6595 json.WriteString(
"Blocks");
6597 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6599 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector (constructor
// arguments between 6614 and 6620 are not visible in this extraction) and
// returns it; subsequent calls return the cached instance.
6606 #endif // #if VMA_STATS_STRING_ENABLED 6608 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6609 VmaAllocator hAllocator,
6610 uint32_t currentFrameIndex)
6612 if(m_pDefragmentator == VMA_NULL)
6614 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6620 return m_pDefragmentator;
// Runs the defragmentator under the mutex, accumulates moved-bytes/-counts
// into pDefragmentationStats, subtracts them from the caller's remaining
// budgets (maxBytesToMove/maxAllocationsToMove are in-out), then destroys all
// now-empty blocks above m_MinBlockCount, keeping at most one as the cached
// empty block. NOTE(review): the early return when no defragmentator exists,
// the stats accumulation lines, and the budget subtraction are missing from
// this extraction.
6623 VkResult VmaBlockVector::Defragment(
6625 VkDeviceSize& maxBytesToMove,
6626 uint32_t& maxAllocationsToMove)
6628 if(m_pDefragmentator == VMA_NULL)
6633 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6636 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6639 if(pDefragmentationStats != VMA_NULL)
6641 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6642 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the budgets it was given.
6645 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6646 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reclaim blocks emptied by the moves, iterating in reverse so removal by
// index stays valid.
6652 m_HasEmptyBlock =
false;
6653 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6655 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6656 if(pBlock->m_Metadata.IsEmpty())
6658 if(m_Blocks.size() > m_MinBlockCount)
6660 if(pDefragmentationStats != VMA_NULL)
6663 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6666 VmaVectorRemove(m_Blocks, blockIndex);
6667 pBlock->Destroy(m_hAllocator);
6668 vma_delete(m_hAllocator, pBlock);
// At the minimum block count: keep the empty block, just note it exists.
6672 m_HasEmptyBlock =
true;
// Deletes the defragmentator if one was created and resets the pointer so a
// later EnsureDefragmentator starts fresh.
6680 void VmaBlockVector::DestroyDefragmentator()
6682 if(m_pDefragmentator != VMA_NULL)
6684 vma_delete(m_hAllocator, m_pDefragmentator);
6685 m_pDefragmentator = VMA_NULL;
// Forces lost-able allocations in every block of this pool to become lost
// (see VmaBlockMetadata::MakeAllocationsLost), under the mutex. Optionally
// reports the total number of allocations lost.
6689 void VmaBlockVector::MakePoolAllocationsLost(
6690 uint32_t currentFrameIndex,
6691 size_t* pLostAllocationCount)
6693 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6694 size_t lostAllocationCount = 0;
6695 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6697 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6699 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6701 if(pLostAllocationCount != VMA_NULL)
6703 *pLostAllocationCount = lostAllocationCount;
// Accumulates per-block allocation statistics into pStats: into the grand
// total plus the buckets for this vector's memory type and its memory heap.
6707 void VmaBlockVector::AddStats(
VmaStats* pStats)
6709 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6710 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6712 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6714 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6716 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6718 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo's declaration line is missing from this extraction.
6720 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6721 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6722 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6723 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: captures the allocator, the block vector to defragment, and
// the current frame index; initializes move counters and the CPU-side
// bookkeeping vectors using the allocator's allocation callbacks.
6730 VmaDefragmentator::VmaDefragmentator(
6731 VmaAllocator hAllocator,
6732 VmaBlockVector* pBlockVector,
6733 uint32_t currentFrameIndex) :
6734 m_hAllocator(hAllocator),
6735 m_pBlockVector(pBlockVector),
6736 m_CurrentFrameIndex(currentFrameIndex),
6738 m_AllocationsMoved(0),
6739 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6740 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees the per-block BlockInfo objects owned by m_Blocks.
6744 VmaDefragmentator::~VmaDefragmentator()
6746 for(
size_t i = m_Blocks.size(); i--; )
6748 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a candidate to be moved; pChanged (optional)
// will later be set to VK_TRUE if the allocation actually moves.
6752 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6754 AllocationInfo allocInfo;
6755 allocInfo.m_hAllocation = hAlloc;
6756 allocInfo.m_pChanged = pChanged;
6757 m_Allocations.push_back(allocInfo);
// Returns a host pointer to this block's memory, preferring (1) a mapping
// previously created for defragmentation, then (2) the block's existing
// persistent mapping, and only then (3) creating a new temporary mapping.
6760 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Reuse the mapping we already made for defragmentation, if any.
6763 if(m_pMappedDataForDefragmentation)
6765 *ppMappedData = m_pMappedDataForDefragmentation;
// Reuse the block's persistent mapping if it is already mapped.
6770 if(m_pBlock->m_Mapping.GetMappedData())
6772 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
// Otherwise map it now; this mapping is undone later by Unmap().
6777 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6778 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the temporary mapping created by EnsureMapping(), if one exists;
// a persistent mapping owned by the block itself is left untouched.
6782 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6784 if(m_pMappedDataForDefragmentation != VMA_NULL)
6786 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: walks source allocations from the last block
// backwards and tries to re-place each into an earlier block (or earlier
// offset), memcpy-ing the data through host mappings and updating metadata.
// Returns VK_INCOMPLETE when the byte/allocation move budget is exhausted.
// NOTE(review): extraction-garbled — the outer loop header and several
// braces/returns are missing from this view; code kept byte-identical.
6790 VkResult VmaDefragmentator::DefragmentRound(
6791 VkDeviceSize maxBytesToMove,
6792 uint32_t maxAllocationsToMove)
6794 if(m_Blocks.empty())
// Start from the last block / last allocation and work backwards.
6799 size_t srcBlockIndex = m_Blocks.size() - 1;
6800 size_t srcAllocIndex = SIZE_MAX;
// Advance to the next non-empty source block/allocation pair.
6806 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6808 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6811 if(srcBlockIndex == 0)
6818 srcAllocIndex = SIZE_MAX;
6823 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6827 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6828 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6830 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6831 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6832 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6833 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block up to (and including) the source block.
6836 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6838 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6839 VmaAllocationRequest dstAllocRequest;
6840 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6841 m_CurrentFrameIndex,
6842 m_pBlockVector->GetFrameInUseCount(),
6843 m_pBlockVector->GetBufferImageGranularity(),
6848 &dstAllocRequest) &&
6850 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation must never need to make other allocations lost.
6852 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop when either budget would be exceeded by this move.
6855 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6856 (m_BytesMoved + size > maxBytesToMove))
6858 return VK_INCOMPLETE;
// Map both blocks so the data can be copied on the CPU.
6861 void* pDstMappedData = VMA_NULL;
6862 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6863 if(res != VK_SUCCESS)
6868 void* pSrcMappedData = VMA_NULL;
6869 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6870 if(res != VK_SUCCESS)
// memcpy call line is missing here in this extraction; these are its args.
6877 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6878 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6879 static_cast<size_t>(size));
// Commit the move in block metadata and in the allocation object itself.
6881 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6882 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6884 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Tell the caller this allocation was relocated.
6886 if(allocInfo.m_pChanged != VMA_NULL)
6888 *allocInfo.m_pChanged = VK_TRUE;
6891 ++m_AllocationsMoved;
6892 m_BytesMoved += size;
6894 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Step to the previous source allocation / previous block.
6902 if(srcAllocIndex > 0)
6908 if(srcBlockIndex > 0)
6911 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info, distributes the
// registered (non-lost) allocations into their owning blocks, sorts blocks
// and allocations, runs up to two DefragmentRound passes within the given
// budgets, then unmaps any temporary mappings.
6921 VkResult VmaDefragmentator::Defragment(
6922 VkDeviceSize maxBytesToMove,
6923 uint32_t maxAllocationsToMove)
// Nothing registered via AddAllocation() — nothing to do.
6925 if(m_Allocations.empty())
// Create one BlockInfo per device memory block in the target vector.
6931 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6932 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6934 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6935 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6936 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
6940 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move registered allocations into their owning BlockInfo, skipping lost ones.
6943 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6945 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6947 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6949 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6950 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6951 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6953 (*it)->m_Allocations.push_back(allocInfo);
6961 m_Allocations.clear();
// Precompute per-block movability flags and sort allocations big-first.
6963 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6965 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6966 pBlockInfo->CalcHasNonMovableAllocations();
6967 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so preferred move destinations come first.
6971 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stop early on VK_INCOMPLETE or error.
6974 VkResult result = VK_SUCCESS;
6975 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6977 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any temporary mappings created during the rounds.
6981 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6983 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic deciding whether relocating an allocation is worthwhile: moving
// to an earlier block always makes sense, to a later block never does, and
// within the same block only a move toward a lower offset qualifies.
6989 bool VmaDefragmentator::MoveMakesSense(
6990 size_t dstBlockIndex, VkDeviceSize dstOffset,
6991 size_t srcBlockIndex, VkDeviceSize srcOffset)
6993 if(dstBlockIndex < srcBlockIndex)
6997 if(dstBlockIndex > srcBlockIndex)
7001 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (its signature line, original 7013, is missing
// from this extraction — this starts mid initializer list). Stores device /
// physical-device handles and callbacks, zeroes internal tables, imports the
// Vulkan function pointers, queries device properties, applies optional
// per-heap size limits, and creates one default block vector plus one
// dedicated-allocation list per memory type.
7014 m_hDevice(pCreateInfo->device),
7015 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7016 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7017 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7018 m_PreferredLargeHeapBlockSize(0),
7019 m_PhysicalDevice(pCreateInfo->physicalDevice),
7020 m_CurrentFrameIndex(0),
7021 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero-initialize all internal state tables before first use.
7025 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7026 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7027 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7029 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7030 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
7032 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7034 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device and memory properties via the imported function pointers.
7045 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7046 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply optional user-provided heap size limits, clamping reported heap sizes.
7053 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7055 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7056 if(limit != VK_WHOLE_SIZE)
7058 m_HeapSizeLimit[heapIndex] = limit;
7059 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7061 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Per memory type: a default block vector and a dedicated-allocation list.
// NOTE(review): several vma_new argument lines are missing here.
7067 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7069 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7071 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7077 GetBufferImageGranularity(),
7082 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all pools must have been destroyed by the user first; then the
// per-memory-type dedicated-allocation lists and block vectors are deleted.
7086 VmaAllocator_T::~VmaAllocator_T()
7088 VMA_ASSERT(m_Pools.empty());
7090 for(
size_t i = GetMemoryTypeCount(); i--; )
7092 vma_delete(
this, m_pDedicatedAllocations[i]);
7093 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: first from statically linked Vulkan entry points
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1, including the KHR dedicated-
// allocation functions fetched via vkGetDeviceProcAddr), then overrides with
// any non-null pointers the user supplied, and finally asserts that every
// required function pointer is set.
7097 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7099 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7100 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7101 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7102 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7103 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7104 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7105 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7106 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7107 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7108 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7109 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7110 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7111 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7112 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7113 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// VK_KHR_dedicated_allocation entry points are extension functions and must
// be fetched at runtime via vkGetDeviceProcAddr.
7114 if(m_UseKhrDedicatedAllocation)
7116 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7117 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7118 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7119 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers override the static ones, field by field.
7121 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7123 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7124 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7126 if(pVulkanFunctions != VMA_NULL)
7128 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7129 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7130 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7131 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7132 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7133 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7134 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7135 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7136 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7137 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7138 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7139 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7140 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7141 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7142 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7143 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function must be resolved by now.
7146 #undef VMA_COPY_IF_NOT_NULL 7150 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7151 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7152 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7153 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7154 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7155 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7156 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7157 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7158 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7159 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7160 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7161 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7162 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7163 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7164 if(m_UseKhrDedicatedAllocation)
7166 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7167 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Computes the preferred VkDeviceMemory block size for a memory type: for
// small heaps (<= VMA_SMALL_HEAP_MAX_SIZE) use 1/8 of the heap, otherwise
// the configured preferred large-heap block size.
7171 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7173 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7174 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7175 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7176 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: decides between a dedicated
// VkDeviceMemory allocation and a suballocation from the type's default
// block vector (dedicated is preferred for large requests or when explicitly
// requested), with dedicated allocation used as a fallback if the block
// vector cannot satisfy the request.
// NOTE(review): extraction-garbled — several condition/argument lines
// (finalCreateInfo setup, flag checks, call arguments) are missing.
7179 VkResult VmaAllocator_T::AllocateMemoryOfType(
7180 const VkMemoryRequirements& vkMemReq,
7181 bool dedicatedAllocation,
7182 VkBuffer dedicatedBuffer,
7183 VkImage dedicatedImage,
7185 uint32_t memTypeIndex,
7186 VmaSuballocationType suballocType,
7187 VmaAllocation* pAllocation)
7189 VMA_ASSERT(pAllocation != VMA_NULL);
7190 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping-related flags are meaningless on non-HOST_VISIBLE memory types.
7196 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7201 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7202 VMA_ASSERT(blockVector);
// Heuristic: very large requests (over half a block) go dedicated.
7204 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7205 bool preferDedicatedMemory =
7206 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7207 dedicatedAllocation ||
7209 vkMemReq.size > preferredBlockSize / 2;
7211 if(preferDedicatedMemory &&
7213 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new device memory.
7222 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7226 return AllocateDedicatedMemory(
// Otherwise try to suballocate from the default block vector first.
7240 VkResult res = blockVector->Allocate(
7242 m_CurrentFrameIndex.load(),
7247 if(res == VK_SUCCESS)
// Block-vector allocation failed and new blocks are forbidden.
7255 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fall back to a dedicated allocation.
7259 res = AllocateDedicatedMemory(
7265 finalCreateInfo.pUserData,
7269 if(res == VK_SUCCESS)
7272 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7278 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated VkDeviceMemory allocation (its own vkAllocateMemory
// call, not a block suballocation), optionally chaining
// VkMemoryDedicatedAllocateInfoKHR for a specific buffer/image, optionally
// persistently mapping it, then registers the new VmaAllocation in the
// per-memory-type sorted dedicated-allocation list.
7285 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7287 VmaSuballocationType suballocType,
7288 uint32_t memTypeIndex,
7290 bool isUserDataString,
7292 VkBuffer dedicatedBuffer,
7293 VkImage dedicatedImage,
7294 VmaAllocation* pAllocation)
7296 VMA_ASSERT(pAllocation);
7298 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7299 allocInfo.memoryTypeIndex = memTypeIndex;
7300 allocInfo.allocationSize = size;
// Chain dedicated-allocation info for the buffer or image if provided.
7302 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7303 if(m_UseKhrDedicatedAllocation)
7305 if(dedicatedBuffer != VK_NULL_HANDLE)
// A dedicated allocation targets a buffer or an image, never both.
7307 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7308 dedicatedAllocInfo.buffer = dedicatedBuffer;
7309 allocInfo.pNext = &dedicatedAllocInfo;
7311 else if(dedicatedImage != VK_NULL_HANDLE)
7313 dedicatedAllocInfo.image = dedicatedImage;
7314 allocInfo.pNext = &dedicatedAllocInfo;
7319 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7320 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7323 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optionally map persistently (the guarding condition line is missing here);
// on map failure the freshly allocated memory is released again.
7327 void* pMappedData = VMA_NULL;
7330 res = (*m_VulkanFunctions.vkMapMemory)(
7339 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7340 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and register it.
7345 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7346 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7347 (*pAllocation)->SetUserData(
this, pUserData);
7351 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7352 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7353 VMA_ASSERT(pDedicatedAllocations);
7354 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7357 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core function and reports false for both flags.
7362 void VmaAllocator_T::GetBufferMemoryRequirements(
7364 VkMemoryRequirements& memReq,
7365 bool& requiresDedicatedAllocation,
7366 bool& prefersDedicatedAllocation)
const 7368 if(m_UseKhrDedicatedAllocation)
7370 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7371 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated flags.
7373 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7375 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7376 memReq2.pNext = &memDedicatedReq;
7378 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7380 memReq = memReq2.memoryRequirements;
7381 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7382 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-only path: no dedicated-allocation information available.
7386 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7387 requiresDedicatedAllocation =
false;
7388 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR
// when the KHR extension is enabled, otherwise the core query with both
// dedicated flags reported as false.
7392 void VmaAllocator_T::GetImageMemoryRequirements(
7394 VkMemoryRequirements& memReq,
7395 bool& requiresDedicatedAllocation,
7396 bool& prefersDedicatedAllocation)
const 7398 if(m_UseKhrDedicatedAllocation)
7400 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7401 memReqInfo.image = hImage;
7403 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7405 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7406 memReq2.pNext = &memDedicatedReq;
7408 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7410 memReq = memReq2.memoryRequirements;
7411 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7412 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-only path: no dedicated-allocation information available.
7416 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7417 requiresDedicatedAllocation =
false;
7418 prefersDedicatedAllocation =
false;
// Main allocation entry point: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise searches
// memory types (best match first, then progressively masking out failed
// types) calling AllocateMemoryOfType for each candidate.
// NOTE(review): extraction-garbled — the flag-test conditions preceding the
// asserts and the vmaFindMemoryTypeIndex call lines are missing.
7422 VkResult VmaAllocator_T::AllocateMemory(
7423 const VkMemoryRequirements& vkMemReq,
7424 bool requiresDedicatedAllocation,
7425 bool prefersDedicatedAllocation,
7426 VkBuffer dedicatedBuffer,
7427 VkImage dedicatedImage,
7429 VmaSuballocationType suballocType,
7430 VmaAllocation* pAllocation)
// Reject contradictory flag combinations up front.
7435 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7436 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7441 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7442 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Dedicated-allocation requirement is incompatible with NEVER_ALLOCATE
// and with custom pools.
7444 if(requiresDedicatedAllocation)
7448 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7449 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7451 if(createInfo.
pool != VK_NULL_HANDLE)
7453 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7454 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7457 if((createInfo.
pool != VK_NULL_HANDLE) &&
7460 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7461 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool: delegate directly to that pool's block vector.
7464 if(createInfo.
pool != VK_NULL_HANDLE)
7466 return createInfo.
pool->m_BlockVector.Allocate(
7468 m_CurrentFrameIndex.load(),
// Default pools: iterate candidate memory types, masking out each type
// that fails and retrying with the next best match.
7477 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7478 uint32_t memTypeIndex = UINT32_MAX;
7480 if(res == VK_SUCCESS)
7482 res = AllocateMemoryOfType(
7484 requiresDedicatedAllocation || prefersDedicatedAllocation,
7492 if(res == VK_SUCCESS)
// Exclude the failed memory type and look for the next candidate.
7502 memoryTypeBits &= ~(1u << memTypeIndex);
7505 if(res == VK_SUCCESS)
7507 res = AllocateMemoryOfType(
7509 requiresDedicatedAllocation || prefersDedicatedAllocation,
7517 if(res == VK_SUCCESS)
// No suitable memory type found at all.
7527 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a VmaAllocation: unless it is a lost allocation, returns block
// suballocations to their owning block vector (custom pool's or the default
// per-type one) and dedicated allocations to FreeDedicatedMemory; finally
// clears the user data and destroys the VmaAllocation object itself.
7538 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7540 VMA_ASSERT(allocation);
// Lost allocations hold no actual device memory — nothing to release.
7542 if(allocation->CanBecomeLost() ==
false ||
7543 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7545 switch(allocation->GetType())
7547 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7549 VmaBlockVector* pBlockVector = VMA_NULL;
7550 VmaPool hPool = allocation->GetPool();
7551 if(hPool != VK_NULL_HANDLE)
7553 pBlockVector = &hPool->m_BlockVector;
7557 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7558 pBlockVector = m_pBlockVectors[memTypeIndex];
7560 pBlockVector->Free(allocation);
7563 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7564 FreeDedicatedMemory(allocation);
// Destroy the handle object regardless of allocation type.
7571 allocation->SetUserData(
this, VMA_NULL);
7572 vma_delete(
this, allocation);
// Computes global statistics: initializes all stat buckets, accumulates the
// default block vectors, all custom pools, and all dedicated allocations,
// then post-processes totals and the per-type/per-heap buckets.
7575 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset total plus every per-type and per-heap bucket.
7578 InitStatInfo(pStats->
total);
7579 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7581 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7585 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7587 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7588 VMA_ASSERT(pBlockVector);
7589 pBlockVector->AddStats(pStats);
// Custom pools (guarded by the pools mutex).
7594 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7595 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7597 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, under their own mutexes.
7602 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7604 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7605 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7606 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7607 VMA_ASSERT(pDedicatedAllocVector);
7608 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7611 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7612 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7613 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7614 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and final figures for each bucket.
7619 VmaPostprocessCalcStatInfo(pStats->
total);
7620 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7621 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7622 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7623 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID for AMD (0x1002), used by defragmentation heuristics.
7626 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation: zeroes the output arrays/stats, registers
// each eligible allocation (block-type, host-visible, not lost) with the
// defragmentator of its owning block vector (custom pool's or the default
// per-type one), runs Defragment on every host-visible default vector and on
// every pool, then tears the defragmentators down again.
7628 VkResult VmaAllocator_T::Defragment(
7629 VmaAllocation* pAllocations,
7630 size_t allocationCount,
7631 VkBool32* pAllocationsChanged,
// Clear the optional outputs before doing any work.
7635 if(pAllocationsChanged != VMA_NULL)
7637 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7639 if(pDefragmentationStats != VMA_NULL)
7641 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7644 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7646 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7648 const size_t poolCount = m_Pools.size();
// Register each candidate allocation with its block vector's defragmentator.
7651 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7653 VmaAllocation hAlloc = pAllocations[allocIndex];
7655 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block suballocations in HOST_VISIBLE memory that are not lost
// can be moved by the CPU-side defragmentation.
7657 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7659 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7661 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7663 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7665 const VmaPool hAllocPool = hAlloc->GetPool();
7667 if(hAllocPool != VK_NULL_HANDLE)
7669 pAllocBlockVector = &hAllocPool->GetBlockVector();
7674 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7677 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7679 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7680 &pAllocationsChanged[allocIndex] : VMA_NULL;
7681 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Default budgets are unlimited unless pDefragmentationInfo says otherwise.
7685 VkResult result = VK_SUCCESS;
7689 VkDeviceSize maxBytesToMove = SIZE_MAX;
7690 uint32_t maxAllocationsToMove = UINT32_MAX;
7691 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation over every host-visible default block vector...
7698 for(uint32_t memTypeIndex = 0;
7699 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7703 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7705 result = m_pBlockVectors[memTypeIndex]->Defragment(
7706 pDefragmentationStats,
7708 maxAllocationsToMove);
// ...and over every custom pool.
7713 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7715 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7716 pDefragmentationStats,
7718 maxAllocationsToMove);
// Cleanup: destroy every defragmentator created above.
7724 for(
size_t poolIndex = poolCount; poolIndex--; )
7726 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7730 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7732 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7734 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for an allocation. For allocations that can become
// lost it uses a compare-exchange loop on the last-use frame index: a lost
// allocation reports null memory/zero offset, a current one reports real
// data, otherwise the frame index is bumped to the current frame and the
// loop retries. Non-lost-capable allocations are reported directly.
7741 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7743 if(hAllocation->CanBecomeLost())
7749 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7750 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report size and user data only; memory fields stay null/zero.
7753 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7757 pAllocationInfo->
offset = 0;
7758 pAllocationInfo->
size = hAllocation->GetSize();
7760 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info.
7763 else if(localLastUseFrameIndex == localCurrFrameIndex)
7765 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7766 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7767 pAllocationInfo->
offset = hAllocation->GetOffset();
7768 pAllocationInfo->
size = hAllocation->GetSize();
7770 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index atomically and retry.
7775 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7777 localLastUseFrameIndex = localCurrFrameIndex;
// Simple path for allocations that cannot become lost.
7784 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7785 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7786 pAllocationInfo->
offset = hAllocation->GetOffset();
7787 pAllocationInfo->
size = hAllocation->GetSize();
7788 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7789 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame (same compare-exchange
// scheme as GetAllocationInfo but without filling any info struct).
// Returns whether the allocation is still valid (not lost); the return
// statements are missing from this extraction.
7793 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
7796 if(hAllocation->CanBecomeLost())
7798 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7799 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7802 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7806 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Attempt to bump the last-use frame index; loop retries on contention.
7812 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7814 localLastUseFrameIndex = localCurrFrameIndex;
// Creates a custom memory pool: constructs a VmaPool_T from a (normalized)
// copy of the create info, pre-creates its minimum number of blocks, and on
// success registers the pool in the sorted m_Pools list. On failure the
// half-constructed pool is deleted again.
7825 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7827 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
// newCreateInfo normalization lines are missing from this extraction.
7840 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7842 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7843 if(res != VK_SUCCESS)
7845 vma_delete(
this, *pPool);
7852 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7853 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted registry (asserting it
// was actually registered) and deletes the pool object.
7859 void VmaAllocator_T::DestroyPool(VmaPool pool)
7863 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7864 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7865 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7868 vma_delete(
this, pool);
// Thin forwarder: delegates pool statistics gathering to the pool's block vector.
7871 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7873 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index (used by the
// lost-allocation machinery).
7876 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7878 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector, supplying the current frame index.
7881 void VmaAllocator_T::MakePoolAllocationsLost(
7883 size_t* pLostAllocationCount)
7885 hPool->m_BlockVector.MakePoolAllocationsLost(
7886 m_CurrentFrameIndex.load(),
7887 pLostAllocationCount);
// Creates a dummy allocation that is already in the "lost" state (frame
// index VMA_FRAME_INDEX_LOST, no user-data string, no backing memory).
7890 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7892 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7893 (*pAllocation)->InitLost();
// Central vkAllocateMemory wrapper: enforces the optional per-heap size
// limit (under its own mutex, decrementing the remaining budget on success,
// failing with OUT_OF_DEVICE_MEMORY when the budget is exceeded) and invokes
// the user's pfnAllocate device-memory callback on success.
7896 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7898 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budgeted path: heap has a user-imposed size limit.
7901 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7903 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7904 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7906 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7907 if(res == VK_SUCCESS)
7909 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Allocation would exceed the artificial heap limit.
7914 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unbudgeted path: plain vkAllocateMemory.
7919 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's device-memory allocation callback, if installed.
7922 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7924 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Central vkFreeMemory wrapper: invokes the user's pfnFree callback first,
// frees the device memory, then returns the freed size to the heap's
// remaining budget if a per-heap size limit is in effect.
7930 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7932 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7934 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7937 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Restore the freed bytes to the artificial heap budget, if one exists.
7939 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7940 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7942 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7943 m_HeapSizeLimit[heapIndex] += size;
7947 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7949 if(hAllocation->CanBecomeLost())
7951 return VK_ERROR_MEMORY_MAP_FAILED;
7954 switch(hAllocation->GetType())
7956 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7958 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7959 char *pBytes = VMA_NULL;
7960 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
7961 if(res == VK_SUCCESS)
7963 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7964 hAllocation->BlockAllocMap();
7968 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7969 return hAllocation->DedicatedAllocMap(
this, ppData);
7972 return VK_ERROR_MEMORY_MAP_FAILED;
// Undoes a Map(): decrements the suballocation's map count and the owning
// block's ref-counted mapping, or delegates to the dedicated allocation.
7976 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7978 switch(hAllocation->GetType())
7980 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7982 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7983 hAllocation->BlockAllocUnmap();
7984 pBlock->Unmap(
this, 1);
7987 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7988 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: removes it from the per-memory-type
// sorted registry (under that type's mutex), unmaps it if it was mapped,
// and frees the underlying VkDeviceMemory via FreeVulkanMemory.
7995 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7997 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7999 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8001 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8002 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8003 VMA_ASSERT(pDedicatedAllocations);
8004 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8005 VMA_ASSERT(success);
8008 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistently mapped dedicated memory must be unmapped before freeing.
8010 if(allocation->GetMappedData() != VMA_NULL)
8012 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8015 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8017 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8020 #if VMA_STATS_STRING_ENABLED 8022 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8024 bool dedicatedAllocationsStarted =
false;
8025 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8027 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8028 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8029 VMA_ASSERT(pDedicatedAllocVector);
8030 if(pDedicatedAllocVector->empty() ==
false)
8032 if(dedicatedAllocationsStarted ==
false)
8034 dedicatedAllocationsStarted =
true;
8035 json.WriteString(
"DedicatedAllocations");
8039 json.BeginString(
"Type ");
8040 json.ContinueString(memTypeIndex);
8045 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8047 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8048 json.BeginObject(
true);
8050 json.WriteString(
"Type");
8051 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8053 json.WriteString(
"Size");
8054 json.WriteNumber(hAlloc->GetSize());
8056 const void* pUserData = hAlloc->GetUserData();
8057 if(pUserData != VMA_NULL)
8059 json.WriteString(
"UserData");
8060 if(hAlloc->IsUserDataString())
8062 json.WriteString((
const char*)pUserData);
8067 json.ContinueString_Pointer(pUserData);
8078 if(dedicatedAllocationsStarted)
8084 bool allocationsStarted =
false;
8085 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8087 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8089 if(allocationsStarted ==
false)
8091 allocationsStarted =
true;
8092 json.WriteString(
"DefaultPools");
8096 json.BeginString(
"Type ");
8097 json.ContinueString(memTypeIndex);
8100 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8103 if(allocationsStarted)
8110 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8111 const size_t poolCount = m_Pools.size();
8114 json.WriteString(
"Pools");
8116 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8118 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8125 #endif // #if VMA_STATS_STRING_ENABLED 8127 static VkResult AllocateMemoryForImage(
8128 VmaAllocator allocator,
8131 VmaSuballocationType suballocType,
8132 VmaAllocation* pAllocation)
8134 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8136 VkMemoryRequirements vkMemReq = {};
8137 bool requiresDedicatedAllocation =
false;
8138 bool prefersDedicatedAllocation =
false;
8139 allocator->GetImageMemoryRequirements(image, vkMemReq,
8140 requiresDedicatedAllocation, prefersDedicatedAllocation);
8142 return allocator->AllocateMemory(
8144 requiresDedicatedAllocation,
8145 prefersDedicatedAllocation,
8148 *pAllocationCreateInfo,
8158 VmaAllocator* pAllocator)
8160 VMA_ASSERT(pCreateInfo && pAllocator);
8161 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8167 VmaAllocator allocator)
8169 if(allocator != VK_NULL_HANDLE)
8171 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8172 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8173 vma_delete(&allocationCallbacks, allocator);
8178 VmaAllocator allocator,
8179 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8181 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8182 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8186 VmaAllocator allocator,
8187 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8189 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8190 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8194 VmaAllocator allocator,
8195 uint32_t memoryTypeIndex,
8196 VkMemoryPropertyFlags* pFlags)
8198 VMA_ASSERT(allocator && pFlags);
8199 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8200 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8204 VmaAllocator allocator,
8205 uint32_t frameIndex)
8207 VMA_ASSERT(allocator);
8208 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8210 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8212 allocator->SetCurrentFrameIndex(frameIndex);
8216 VmaAllocator allocator,
8219 VMA_ASSERT(allocator && pStats);
8220 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8221 allocator->CalculateStats(pStats);
8224 #if VMA_STATS_STRING_ENABLED 8227 VmaAllocator allocator,
8228 char** ppStatsString,
8229 VkBool32 detailedMap)
8231 VMA_ASSERT(allocator && ppStatsString);
8232 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8234 VmaStringBuilder sb(allocator);
8236 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8240 allocator->CalculateStats(&stats);
8242 json.WriteString(
"Total");
8243 VmaPrintStatInfo(json, stats.
total);
8245 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8247 json.BeginString(
"Heap ");
8248 json.ContinueString(heapIndex);
8252 json.WriteString(
"Size");
8253 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8255 json.WriteString(
"Flags");
8256 json.BeginArray(
true);
8257 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8259 json.WriteString(
"DEVICE_LOCAL");
8265 json.WriteString(
"Stats");
8266 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8269 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8271 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8273 json.BeginString(
"Type ");
8274 json.ContinueString(typeIndex);
8279 json.WriteString(
"Flags");
8280 json.BeginArray(
true);
8281 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8282 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8284 json.WriteString(
"DEVICE_LOCAL");
8286 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8288 json.WriteString(
"HOST_VISIBLE");
8290 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8292 json.WriteString(
"HOST_COHERENT");
8294 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8296 json.WriteString(
"HOST_CACHED");
8298 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8300 json.WriteString(
"LAZILY_ALLOCATED");
8306 json.WriteString(
"Stats");
8307 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8316 if(detailedMap == VK_TRUE)
8318 allocator->PrintDetailedMap(json);
8324 const size_t len = sb.GetLength();
8325 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8328 memcpy(pChars, sb.GetData(), len);
8331 *ppStatsString = pChars;
8335 VmaAllocator allocator,
8338 if(pStatsString != VMA_NULL)
8340 VMA_ASSERT(allocator);
8341 size_t len = strlen(pStatsString);
8342 vma_delete_array(allocator, pStatsString, len + 1);
8346 #endif // #if VMA_STATS_STRING_ENABLED 8352 VmaAllocator allocator,
8353 uint32_t memoryTypeBits,
8355 uint32_t* pMemoryTypeIndex)
8357 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8358 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8359 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8366 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8370 switch(pAllocationCreateInfo->
usage)
8375 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8378 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8381 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8382 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8385 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8386 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8392 *pMemoryTypeIndex = UINT32_MAX;
8393 uint32_t minCost = UINT32_MAX;
8394 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8395 memTypeIndex < allocator->GetMemoryTypeCount();
8396 ++memTypeIndex, memTypeBit <<= 1)
8399 if((memTypeBit & memoryTypeBits) != 0)
8401 const VkMemoryPropertyFlags currFlags =
8402 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8404 if((requiredFlags & ~currFlags) == 0)
8407 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8409 if(currCost < minCost)
8411 *pMemoryTypeIndex = memTypeIndex;
8421 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8425 VmaAllocator allocator,
8426 const VkBufferCreateInfo* pBufferCreateInfo,
8428 uint32_t* pMemoryTypeIndex)
8430 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8431 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8432 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8433 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8435 const VkDevice hDev = allocator->m_hDevice;
8436 VkBuffer hBuffer = VK_NULL_HANDLE;
8437 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8438 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8439 if(res == VK_SUCCESS)
8441 VkMemoryRequirements memReq = {};
8442 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8443 hDev, hBuffer, &memReq);
8447 memReq.memoryTypeBits,
8448 pAllocationCreateInfo,
8451 allocator->GetVulkanFunctions().vkDestroyBuffer(
8452 hDev, hBuffer, allocator->GetAllocationCallbacks());
8458 VmaAllocator allocator,
8459 const VkImageCreateInfo* pImageCreateInfo,
8461 uint32_t* pMemoryTypeIndex)
8463 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8464 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8465 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8466 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8468 const VkDevice hDev = allocator->m_hDevice;
8469 VkImage hImage = VK_NULL_HANDLE;
8470 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8471 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8472 if(res == VK_SUCCESS)
8474 VkMemoryRequirements memReq = {};
8475 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8476 hDev, hImage, &memReq);
8480 memReq.memoryTypeBits,
8481 pAllocationCreateInfo,
8484 allocator->GetVulkanFunctions().vkDestroyImage(
8485 hDev, hImage, allocator->GetAllocationCallbacks());
8491 VmaAllocator allocator,
8495 VMA_ASSERT(allocator && pCreateInfo && pPool);
8497 VMA_DEBUG_LOG(
"vmaCreatePool");
8499 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8501 return allocator->CreatePool(pCreateInfo, pPool);
8505 VmaAllocator allocator,
8508 VMA_ASSERT(allocator);
8510 if(pool == VK_NULL_HANDLE)
8515 VMA_DEBUG_LOG(
"vmaDestroyPool");
8517 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8519 allocator->DestroyPool(pool);
8523 VmaAllocator allocator,
8527 VMA_ASSERT(allocator && pool && pPoolStats);
8529 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8531 allocator->GetPoolStats(pool, pPoolStats);
8535 VmaAllocator allocator,
8537 size_t* pLostAllocationCount)
8539 VMA_ASSERT(allocator && pool);
8541 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8543 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8547 VmaAllocator allocator,
8548 const VkMemoryRequirements* pVkMemoryRequirements,
8550 VmaAllocation* pAllocation,
8553 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8555 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8557 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8559 VkResult result = allocator->AllocateMemory(
8560 *pVkMemoryRequirements,
8566 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8569 if(pAllocationInfo && result == VK_SUCCESS)
8571 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8578 VmaAllocator allocator,
8581 VmaAllocation* pAllocation,
8584 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8586 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8590 VkMemoryRequirements vkMemReq = {};
8591 bool requiresDedicatedAllocation =
false;
8592 bool prefersDedicatedAllocation =
false;
8593 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8594 requiresDedicatedAllocation,
8595 prefersDedicatedAllocation);
8597 VkResult result = allocator->AllocateMemory(
8599 requiresDedicatedAllocation,
8600 prefersDedicatedAllocation,
8604 VMA_SUBALLOCATION_TYPE_BUFFER,
8607 if(pAllocationInfo && result == VK_SUCCESS)
8609 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8616 VmaAllocator allocator,
8619 VmaAllocation* pAllocation,
8622 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8624 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8626 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8628 VkResult result = AllocateMemoryForImage(
8632 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8635 if(pAllocationInfo && result == VK_SUCCESS)
8637 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8644 VmaAllocator allocator,
8645 VmaAllocation allocation)
8647 VMA_ASSERT(allocator && allocation);
8649 VMA_DEBUG_LOG(
"vmaFreeMemory");
8651 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8653 allocator->FreeMemory(allocation);
8657 VmaAllocator allocator,
8658 VmaAllocation allocation,
8661 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8663 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8665 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8669 VmaAllocator allocator,
8670 VmaAllocation allocation)
8672 VMA_ASSERT(allocator && allocation);
8674 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8676 return allocator->TouchAllocation(allocation);
8680 VmaAllocator allocator,
8681 VmaAllocation allocation,
8684 VMA_ASSERT(allocator && allocation);
8686 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8688 allocation->SetUserData(allocator, pUserData);
8692 VmaAllocator allocator,
8693 VmaAllocation* pAllocation)
8695 VMA_ASSERT(allocator && pAllocation);
8697 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8699 allocator->CreateLostAllocation(pAllocation);
8703 VmaAllocator allocator,
8704 VmaAllocation allocation,
8707 VMA_ASSERT(allocator && allocation && ppData);
8709 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8711 return allocator->Map(allocation, ppData);
8715 VmaAllocator allocator,
8716 VmaAllocation allocation)
8718 VMA_ASSERT(allocator && allocation);
8720 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8722 allocator->Unmap(allocation);
8726 VmaAllocator allocator,
8727 VmaAllocation* pAllocations,
8728 size_t allocationCount,
8729 VkBool32* pAllocationsChanged,
8733 VMA_ASSERT(allocator && pAllocations);
8735 VMA_DEBUG_LOG(
"vmaDefragment");
8737 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8739 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8743 VmaAllocator allocator,
8744 const VkBufferCreateInfo* pBufferCreateInfo,
8747 VmaAllocation* pAllocation,
8750 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8752 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8754 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8756 *pBuffer = VK_NULL_HANDLE;
8757 *pAllocation = VK_NULL_HANDLE;
8760 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8761 allocator->m_hDevice,
8763 allocator->GetAllocationCallbacks(),
8768 VkMemoryRequirements vkMemReq = {};
8769 bool requiresDedicatedAllocation =
false;
8770 bool prefersDedicatedAllocation =
false;
8771 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8772 requiresDedicatedAllocation, prefersDedicatedAllocation);
8776 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8778 VMA_ASSERT(vkMemReq.alignment %
8779 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8781 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8783 VMA_ASSERT(vkMemReq.alignment %
8784 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8786 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8788 VMA_ASSERT(vkMemReq.alignment %
8789 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8793 res = allocator->AllocateMemory(
8795 requiresDedicatedAllocation,
8796 prefersDedicatedAllocation,
8799 *pAllocationCreateInfo,
8800 VMA_SUBALLOCATION_TYPE_BUFFER,
8805 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8806 allocator->m_hDevice,
8808 (*pAllocation)->GetMemory(),
8809 (*pAllocation)->GetOffset());
8813 if(pAllocationInfo != VMA_NULL)
8815 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8819 allocator->FreeMemory(*pAllocation);
8820 *pAllocation = VK_NULL_HANDLE;
8821 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8822 *pBuffer = VK_NULL_HANDLE;
8825 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8826 *pBuffer = VK_NULL_HANDLE;
8833 VmaAllocator allocator,
8835 VmaAllocation allocation)
8837 if(buffer != VK_NULL_HANDLE)
8839 VMA_ASSERT(allocator);
8841 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8843 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8845 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8847 allocator->FreeMemory(allocation);
8852 VmaAllocator allocator,
8853 const VkImageCreateInfo* pImageCreateInfo,
8856 VmaAllocation* pAllocation,
8859 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8861 VMA_DEBUG_LOG(
"vmaCreateImage");
8863 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8865 *pImage = VK_NULL_HANDLE;
8866 *pAllocation = VK_NULL_HANDLE;
8869 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8870 allocator->m_hDevice,
8872 allocator->GetAllocationCallbacks(),
8876 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8877 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8878 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8881 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8885 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8886 allocator->m_hDevice,
8888 (*pAllocation)->GetMemory(),
8889 (*pAllocation)->GetOffset());
8893 if(pAllocationInfo != VMA_NULL)
8895 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8899 allocator->FreeMemory(*pAllocation);
8900 *pAllocation = VK_NULL_HANDLE;
8901 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8902 *pImage = VK_NULL_HANDLE;
8905 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8906 *pImage = VK_NULL_HANDLE;
8913 VmaAllocator allocator,
8915 VmaAllocation allocation)
8917 if(image != VK_NULL_HANDLE)
8919 VMA_ASSERT(allocator);
8921 VMA_DEBUG_LOG(
"vmaDestroyImage");
8923 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8925 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8927 allocator->FreeMemory(allocation);
8931 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:896
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1150
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:921
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:906
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1107
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:900
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1456
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:918
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1631
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1326
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1380
Definition: vk_mem_alloc.h:1187
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:889
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1225
Definition: vk_mem_alloc.h:1134
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:930
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:983
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:915
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1138
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1048
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:903
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1047
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:911
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1635
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:947
VmaStatInfo total
Definition: vk_mem_alloc.h:1057
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1643
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1209
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1626
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:904
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:831
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:924
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1334
Definition: vk_mem_alloc.h:1328
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1466
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:901
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1246
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1350
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1386
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:887
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1337
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1085
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1621
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1639
Definition: vk_mem_alloc.h:1124
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1233
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:902
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1053
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:837
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:858
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:863
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1641
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1220
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1396
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:897
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1036
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1345
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:850
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1194
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1049
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:854
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1340
Definition: vk_mem_alloc.h:1133
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1215
Definition: vk_mem_alloc.h:1206
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1039
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:899
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1358
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:933
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1389
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1204
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1239
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:971
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1055
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1174
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1048
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:908
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:852
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:907
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1372
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
Definition: vk_mem_alloc.h:1480
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB. Optional.
Definition: vk_mem_alloc.h:927
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1048
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1045
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1377
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1461
Definition: vk_mem_alloc.h:1202
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:1637
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:895
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:910
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1043
Definition: vk_mem_alloc.h:1090
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1330
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1041
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:905
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:909
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such blocks.
Definition: vk_mem_alloc.h:1161
Definition: vk_mem_alloc.h:1117
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1475
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:885
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:898
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1442
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1308
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1049
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
TODO finish documentation...
Definition: vk_mem_alloc.h:1200
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1056
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1383
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1049
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1447