23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 688 #include <vulkan/vulkan.h> 690 VK_DEFINE_HANDLE(VmaAllocator)
694 VmaAllocator allocator,
696 VkDeviceMemory memory,
700 VmaAllocator allocator,
702 VkDeviceMemory memory,
854 VmaAllocator* pAllocator);
858 VmaAllocator allocator);
865 VmaAllocator allocator,
866 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
873 VmaAllocator allocator,
874 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
883 VmaAllocator allocator,
884 uint32_t memoryTypeIndex,
885 VkMemoryPropertyFlags* pFlags);
896 VmaAllocator allocator,
897 uint32_t frameIndex);
927 VmaAllocator allocator,
930 #define VMA_STATS_STRING_ENABLED 1 932 #if VMA_STATS_STRING_ENABLED 938 VmaAllocator allocator,
939 char** ppStatsString,
940 VkBool32 detailedMap);
943 VmaAllocator allocator,
946 #endif // #if VMA_STATS_STRING_ENABLED 948 VK_DEFINE_HANDLE(VmaPool)
1119 VmaAllocator allocator,
1120 uint32_t memoryTypeBits,
1122 uint32_t* pMemoryTypeIndex);
1223 VmaAllocator allocator,
1230 VmaAllocator allocator,
1240 VmaAllocator allocator,
1251 VmaAllocator allocator,
1253 size_t* pLostAllocationCount);
1255 VK_DEFINE_HANDLE(VmaAllocation)
1311 VmaAllocator allocator,
1312 const VkMemoryRequirements* pVkMemoryRequirements,
1314 VmaAllocation* pAllocation,
1324 VmaAllocator allocator,
1327 VmaAllocation* pAllocation,
1332 VmaAllocator allocator,
1335 VmaAllocation* pAllocation,
1340 VmaAllocator allocator,
1341 VmaAllocation allocation);
1345 VmaAllocator allocator,
1346 VmaAllocation allocation,
1363 VmaAllocator allocator,
1364 VmaAllocation allocation,
1378 VmaAllocator allocator,
1379 VmaAllocation* pAllocation);
1416 VmaAllocator allocator,
1417 VmaAllocation allocation,
1425 VmaAllocator allocator,
1426 VmaAllocation allocation);
1531 VmaAllocator allocator,
1532 VmaAllocation* pAllocations,
1533 size_t allocationCount,
1534 VkBool32* pAllocationsChanged,
1565 VmaAllocator allocator,
1566 const VkBufferCreateInfo* pBufferCreateInfo,
1569 VmaAllocation* pAllocation,
1584 VmaAllocator allocator,
1586 VmaAllocation allocation);
1590 VmaAllocator allocator,
1591 const VkImageCreateInfo* pImageCreateInfo,
1594 VmaAllocation* pAllocation,
1609 VmaAllocator allocator,
1611 VmaAllocation allocation);
1617 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1620 #ifdef __INTELLISENSE__ 1621 #define VMA_IMPLEMENTATION 1624 #ifdef VMA_IMPLEMENTATION 1625 #undef VMA_IMPLEMENTATION 1647 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1648 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1660 #if VMA_USE_STL_CONTAINERS 1661 #define VMA_USE_STL_VECTOR 1 1662 #define VMA_USE_STL_UNORDERED_MAP 1 1663 #define VMA_USE_STL_LIST 1 1666 #if VMA_USE_STL_VECTOR 1670 #if VMA_USE_STL_UNORDERED_MAP 1671 #include <unordered_map> 1674 #if VMA_USE_STL_LIST 1683 #include <algorithm> 1687 #if !defined(_WIN32) 1694 #define VMA_ASSERT(expr) assert(expr) 1696 #define VMA_ASSERT(expr) 1702 #ifndef VMA_HEAVY_ASSERT 1704 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1706 #define VMA_HEAVY_ASSERT(expr) 1712 #define VMA_NULL nullptr 1715 #ifndef VMA_ALIGN_OF 1716 #define VMA_ALIGN_OF(type) (__alignof(type)) 1719 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1721 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1723 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1727 #ifndef VMA_SYSTEM_FREE 1729 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1731 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1736 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1740 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1744 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1748 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1751 #ifndef VMA_DEBUG_LOG 1752 #define VMA_DEBUG_LOG(format, ...) 1762 #if VMA_STATS_STRING_ENABLED 1763 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1765 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1767 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1769 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
// Render a pointer value as implementation-defined "%p" text into outStr.
static inline void VmaPtrToStr(
    char* outStr,
    size_t strLen,
    const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1783 void Lock() { m_Mutex.lock(); }
1784 void Unlock() { m_Mutex.unlock(); }
1788 #define VMA_MUTEX VmaMutex 1799 #ifndef VMA_ATOMIC_UINT32 1800 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1803 #ifndef VMA_BEST_FIT 1816 #define VMA_BEST_FIT (1) 1819 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1824 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1827 #ifndef VMA_DEBUG_ALIGNMENT 1832 #define VMA_DEBUG_ALIGNMENT (1) 1835 #ifndef VMA_DEBUG_MARGIN 1840 #define VMA_DEBUG_MARGIN (0) 1843 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1848 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1851 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1856 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1859 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1860 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1864 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1865 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1869 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1870 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1874 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1880 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1881 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Population count: number of bits set in v, via parallel (SWAR) reduction.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = v - ((v >> 1) & 0x55555555);          // 2-bit sums
    count = ((count >> 2) & 0x33333333) + (count & 0x33333333); // 4-bit sums
    count = ((count >> 4) + count) & 0x0F0F0F0F;           // 8-bit sums
    count = ((count >> 8) + count) & 0x00FF00FF;           // 16-bit sums
    count = ((count >> 16) + count) & 0x0000FFFF;          // final sum
    return count;
}
// Round val up to the nearest multiple of align. Exact multiples are returned
// unchanged; align must be > 0 (any positive value works, not only powers of 2).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T blocks = (val + align - 1) / align;
    return blocks * align;
}
// Integer division of x by y, rounded to nearest (halves round upward).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition step used by VmaQuickSort: partitions [beg, end)
// around the last element and returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;                // destination for next "less" element
    for(Iterator it = beg; it < centerValue; ++it)
    {
        if(cmp(*it, *centerValue))
        {
            if(insertIndex != it)
            {
                VMA_SWAP(*it, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Move pivot into its final slot.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1934 template<
typename Iterator,
typename Compare>
1935 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1939 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1940 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1941 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1945 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1947 #endif // #ifndef VMA_SORT 1956 static inline bool VmaBlocksOnSamePage(
1957 VkDeviceSize resourceAOffset,
1958 VkDeviceSize resourceASize,
1959 VkDeviceSize resourceBOffset,
1960 VkDeviceSize pageSize)
1962 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1963 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1964 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1965 VkDeviceSize resourceBStart = resourceBOffset;
1966 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1967 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation; order matters: it is used by
// VmaIsBufferImageGranularityConflict to canonicalize comparisons.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unoccupied range
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource kind not known
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // forces 32-bit storage
};
1987 static inline bool VmaIsBufferImageGranularityConflict(
1988 VmaSuballocationType suballocType1,
1989 VmaSuballocationType suballocType2)
1991 if(suballocType1 > suballocType2)
1993 VMA_SWAP(suballocType1, suballocType2);
1996 switch(suballocType1)
1998 case VMA_SUBALLOCATION_TYPE_FREE:
2000 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2002 case VMA_SUBALLOCATION_TYPE_BUFFER:
2004 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2005 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2006 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2008 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2009 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2010 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2011 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2013 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2014 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2026 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2027 m_pMutex(useMutex ? &mutex : VMA_NULL)
2044 VMA_MUTEX* m_pMutex;
2047 #if VMA_DEBUG_GLOBAL_MUTEX 2048 static VMA_MUTEX gDebugGlobalMutex;
2049 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2051 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2055 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over sorted range [beg, end): returns an iterator to the first
element for which cmp(element, key) is false (i.e. first element not less than
key), or end if all elements are less. Equivalent to std::lower_bound.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // midpoint still less than key: search upper half
        }
        else
        {
            up = mid;       // midpoint not less: candidate, search lower half
        }
    }
    return beg + down;
}
2088 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2090 if((pAllocationCallbacks != VMA_NULL) &&
2091 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2093 return (*pAllocationCallbacks->pfnAllocation)(
2094 pAllocationCallbacks->pUserData,
2097 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2101 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2105 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2107 if((pAllocationCallbacks != VMA_NULL) &&
2108 (pAllocationCallbacks->pfnFree != VMA_NULL))
2110 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2114 VMA_SYSTEM_FREE(ptr);
2118 template<
typename T>
2119 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2121 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2124 template<
typename T>
2125 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2127 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2130 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2132 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2134 template<
typename T>
2135 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2138 VmaFree(pAllocationCallbacks, ptr);
2141 template<
typename T>
2142 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2146 for(
size_t i = count; i--; )
2150 VmaFree(pAllocationCallbacks, ptr);
2155 template<
typename T>
2156 class VmaStlAllocator
2159 const VkAllocationCallbacks*
const m_pCallbacks;
2160 typedef T value_type;
2162 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2163 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2165 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2166 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2168 template<
typename U>
2169 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2171 return m_pCallbacks == rhs.m_pCallbacks;
2173 template<
typename U>
2174 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2176 return m_pCallbacks != rhs.m_pCallbacks;
2179 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2182 #if VMA_USE_STL_VECTOR 2184 #define VmaVector std::vector 2186 template<
typename T,
typename allocatorT>
2187 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2189 vec.insert(vec.begin() + index, item);
2192 template<
typename T,
typename allocatorT>
2193 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2195 vec.erase(vec.begin() + index);
2198 #else // #if VMA_USE_STL_VECTOR 2203 template<
typename T,
typename AllocatorT>
2207 typedef T value_type;
2209 VmaVector(
const AllocatorT& allocator) :
2210 m_Allocator(allocator),
2217 VmaVector(
size_t count,
const AllocatorT& allocator) :
2218 m_Allocator(allocator),
2219 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2225 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2226 m_Allocator(src.m_Allocator),
2227 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2228 m_Count(src.m_Count),
2229 m_Capacity(src.m_Count)
2233 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2239 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2242 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2246 resize(rhs.m_Count);
2249 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2255 bool empty()
const {
return m_Count == 0; }
2256 size_t size()
const {
return m_Count; }
2257 T* data() {
return m_pArray; }
2258 const T* data()
const {
return m_pArray; }
2260 T& operator[](
size_t index)
2262 VMA_HEAVY_ASSERT(index < m_Count);
2263 return m_pArray[index];
2265 const T& operator[](
size_t index)
const 2267 VMA_HEAVY_ASSERT(index < m_Count);
2268 return m_pArray[index];
2273 VMA_HEAVY_ASSERT(m_Count > 0);
2276 const T& front()
const 2278 VMA_HEAVY_ASSERT(m_Count > 0);
2283 VMA_HEAVY_ASSERT(m_Count > 0);
2284 return m_pArray[m_Count - 1];
2286 const T& back()
const 2288 VMA_HEAVY_ASSERT(m_Count > 0);
2289 return m_pArray[m_Count - 1];
2292 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2294 newCapacity = VMA_MAX(newCapacity, m_Count);
2296 if((newCapacity < m_Capacity) && !freeMemory)
2298 newCapacity = m_Capacity;
2301 if(newCapacity != m_Capacity)
2303 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2306 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2308 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2309 m_Capacity = newCapacity;
2310 m_pArray = newArray;
2314 void resize(
size_t newCount,
bool freeMemory =
false)
2316 size_t newCapacity = m_Capacity;
2317 if(newCount > m_Capacity)
2319 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2323 newCapacity = newCount;
2326 if(newCapacity != m_Capacity)
2328 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2329 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2330 if(elementsToCopy != 0)
2332 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2334 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2335 m_Capacity = newCapacity;
2336 m_pArray = newArray;
2342 void clear(
bool freeMemory =
false)
2344 resize(0, freeMemory);
2347 void insert(
size_t index,
const T& src)
2349 VMA_HEAVY_ASSERT(index <= m_Count);
2350 const size_t oldCount = size();
2351 resize(oldCount + 1);
2352 if(index < oldCount)
2354 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2356 m_pArray[index] = src;
2359 void remove(
size_t index)
2361 VMA_HEAVY_ASSERT(index < m_Count);
2362 const size_t oldCount = size();
2363 if(index < oldCount - 1)
2365 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2367 resize(oldCount - 1);
2370 void push_back(
const T& src)
2372 const size_t newIndex = size();
2373 resize(newIndex + 1);
2374 m_pArray[newIndex] = src;
2379 VMA_HEAVY_ASSERT(m_Count > 0);
2383 void push_front(
const T& src)
2390 VMA_HEAVY_ASSERT(m_Count > 0);
2394 typedef T* iterator;
2396 iterator begin() {
return m_pArray; }
2397 iterator end() {
return m_pArray + m_Count; }
2400 AllocatorT m_Allocator;
2406 template<
typename T,
typename allocatorT>
2407 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2409 vec.insert(index, item);
2412 template<
typename T,
typename allocatorT>
2413 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2418 #endif // #if VMA_USE_STL_VECTOR 2420 template<
typename CmpLess,
typename VectorT>
2421 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2423 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2425 vector.data() + vector.size(),
2427 CmpLess()) - vector.data();
2428 VmaVectorInsert(vector, indexToInsert, value);
2429 return indexToInsert;
// Removes one element equal to value (under CmpLess) from a sorted vector.
// Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equality expressed through the ordering: neither side is less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
/*
Finds an element equal to value (under CmpLess) in a sorted vector.
Returns its index, or vector.size() when not found.

Fixes: the end-of-range check compared an iterator against vector.size()
(iterator vs size_t type mismatch), and a non-const iterator was formed from a
const vector. Both replaced with const pointers derived from data().
*/
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const beg = vector.data();
    const typename VectorT::value_type* const end = beg + vector.size();
    const typename VectorT::value_type* const it =
        VmaBinaryFindFirstNotLess(beg, end, value, comparator);
    if((it != end) && !comparator(*it, value) && !comparator(value, *it))
    {
        return (size_t)(it - beg);
    }
    return vector.size();
}
2477 template<
typename T>
2478 class VmaPoolAllocator
2481 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2482 ~VmaPoolAllocator();
2490 uint32_t NextFreeIndex;
2497 uint32_t FirstFreeIndex;
2500 const VkAllocationCallbacks* m_pAllocationCallbacks;
2501 size_t m_ItemsPerBlock;
2502 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2504 ItemBlock& CreateNewBlock();
2507 template<
typename T>
2508 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2509 m_pAllocationCallbacks(pAllocationCallbacks),
2510 m_ItemsPerBlock(itemsPerBlock),
2511 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2513 VMA_ASSERT(itemsPerBlock > 0);
2516 template<
typename T>
2517 VmaPoolAllocator<T>::~VmaPoolAllocator()
2522 template<
typename T>
2523 void VmaPoolAllocator<T>::Clear()
2525 for(
size_t i = m_ItemBlocks.size(); i--; )
2526 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2527 m_ItemBlocks.clear();
2530 template<
typename T>
2531 T* VmaPoolAllocator<T>::Alloc()
2533 for(
size_t i = m_ItemBlocks.size(); i--; )
2535 ItemBlock& block = m_ItemBlocks[i];
2537 if(block.FirstFreeIndex != UINT32_MAX)
2539 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2540 block.FirstFreeIndex = pItem->NextFreeIndex;
2541 return &pItem->Value;
2546 ItemBlock& newBlock = CreateNewBlock();
2547 Item*
const pItem = &newBlock.pItems[0];
2548 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2549 return &pItem->Value;
2552 template<
typename T>
2553 void VmaPoolAllocator<T>::Free(T* ptr)
2556 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2558 ItemBlock& block = m_ItemBlocks[i];
2562 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2565 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2567 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2568 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2569 block.FirstFreeIndex = index;
2573 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2576 template<
typename T>
2577 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2579 ItemBlock newBlock = {
2580 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2582 m_ItemBlocks.push_back(newBlock);
2585 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2586 newBlock.pItems[i].NextFreeIndex = i + 1;
2587 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2588 return m_ItemBlocks.back();
2594 #if VMA_USE_STL_LIST 2596 #define VmaList std::list 2598 #else // #if VMA_USE_STL_LIST 2600 template<
typename T>
2609 template<
typename T>
2613 typedef VmaListItem<T> ItemType;
2615 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2619 size_t GetCount()
const {
return m_Count; }
2620 bool IsEmpty()
const {
return m_Count == 0; }
2622 ItemType* Front() {
return m_pFront; }
2623 const ItemType* Front()
const {
return m_pFront; }
2624 ItemType* Back() {
return m_pBack; }
2625 const ItemType* Back()
const {
return m_pBack; }
2627 ItemType* PushBack();
2628 ItemType* PushFront();
2629 ItemType* PushBack(
const T& value);
2630 ItemType* PushFront(
const T& value);
2635 ItemType* InsertBefore(ItemType* pItem);
2637 ItemType* InsertAfter(ItemType* pItem);
2639 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2640 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2642 void Remove(ItemType* pItem);
2645 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2646 VmaPoolAllocator<ItemType> m_ItemAllocator;
2652 VmaRawList(
const VmaRawList<T>& src);
2653 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2656 template<
typename T>
2657 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2658 m_pAllocationCallbacks(pAllocationCallbacks),
2659 m_ItemAllocator(pAllocationCallbacks, 128),
2666 template<
typename T>
2667 VmaRawList<T>::~VmaRawList()
2673 template<
typename T>
2674 void VmaRawList<T>::Clear()
2676 if(IsEmpty() ==
false)
2678 ItemType* pItem = m_pBack;
2679 while(pItem != VMA_NULL)
2681 ItemType*
const pPrevItem = pItem->pPrev;
2682 m_ItemAllocator.Free(pItem);
2685 m_pFront = VMA_NULL;
2691 template<
typename T>
2692 VmaListItem<T>* VmaRawList<T>::PushBack()
2694 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2695 pNewItem->pNext = VMA_NULL;
2698 pNewItem->pPrev = VMA_NULL;
2699 m_pFront = pNewItem;
2705 pNewItem->pPrev = m_pBack;
2706 m_pBack->pNext = pNewItem;
2713 template<
typename T>
2714 VmaListItem<T>* VmaRawList<T>::PushFront()
2716 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2717 pNewItem->pPrev = VMA_NULL;
2720 pNewItem->pNext = VMA_NULL;
2721 m_pFront = pNewItem;
2727 pNewItem->pNext = m_pFront;
2728 m_pFront->pPrev = pNewItem;
2729 m_pFront = pNewItem;
2735 template<
typename T>
2736 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2738 ItemType*
const pNewItem = PushBack();
2739 pNewItem->Value = value;
2743 template<
typename T>
2744 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2746 ItemType*
const pNewItem = PushFront();
2747 pNewItem->Value = value;
2751 template<
typename T>
2752 void VmaRawList<T>::PopBack()
2754 VMA_HEAVY_ASSERT(m_Count > 0);
2755 ItemType*
const pBackItem = m_pBack;
2756 ItemType*
const pPrevItem = pBackItem->pPrev;
2757 if(pPrevItem != VMA_NULL)
2759 pPrevItem->pNext = VMA_NULL;
2761 m_pBack = pPrevItem;
2762 m_ItemAllocator.Free(pBackItem);
2766 template<
typename T>
2767 void VmaRawList<T>::PopFront()
2769 VMA_HEAVY_ASSERT(m_Count > 0);
2770 ItemType*
const pFrontItem = m_pFront;
2771 ItemType*
const pNextItem = pFrontItem->pNext;
2772 if(pNextItem != VMA_NULL)
2774 pNextItem->pPrev = VMA_NULL;
2776 m_pFront = pNextItem;
2777 m_ItemAllocator.Free(pFrontItem);
2781 template<
typename T>
2782 void VmaRawList<T>::Remove(ItemType* pItem)
2784 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2785 VMA_HEAVY_ASSERT(m_Count > 0);
2787 if(pItem->pPrev != VMA_NULL)
2789 pItem->pPrev->pNext = pItem->pNext;
2793 VMA_HEAVY_ASSERT(m_pFront == pItem);
2794 m_pFront = pItem->pNext;
2797 if(pItem->pNext != VMA_NULL)
2799 pItem->pNext->pPrev = pItem->pPrev;
2803 VMA_HEAVY_ASSERT(m_pBack == pItem);
2804 m_pBack = pItem->pPrev;
2807 m_ItemAllocator.Free(pItem);
2811 template<
typename T>
2812 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2814 if(pItem != VMA_NULL)
2816 ItemType*
const prevItem = pItem->pPrev;
2817 ItemType*
const newItem = m_ItemAllocator.Alloc();
2818 newItem->pPrev = prevItem;
2819 newItem->pNext = pItem;
2820 pItem->pPrev = newItem;
2821 if(prevItem != VMA_NULL)
2823 prevItem->pNext = newItem;
2827 VMA_HEAVY_ASSERT(m_pFront == pItem);
2837 template<
typename T>
2838 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2840 if(pItem != VMA_NULL)
2842 ItemType*
const nextItem = pItem->pNext;
2843 ItemType*
const newItem = m_ItemAllocator.Alloc();
2844 newItem->pNext = nextItem;
2845 newItem->pPrev = pItem;
2846 pItem->pNext = newItem;
2847 if(nextItem != VMA_NULL)
2849 nextItem->pPrev = newItem;
2853 VMA_HEAVY_ASSERT(m_pBack == pItem);
2863 template<
typename T>
2864 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2866 ItemType*
const newItem = InsertBefore(pItem);
2867 newItem->Value = value;
2871 template<
typename T>
2872 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2874 ItemType*
const newItem = InsertAfter(pItem);
2875 newItem->Value = value;
2879 template<
typename T,
typename AllocatorT>
2892 T& operator*()
const 2894 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2895 return m_pItem->Value;
2897 T* operator->()
const 2899 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2900 return &m_pItem->Value;
2903 iterator& operator++()
2905 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2906 m_pItem = m_pItem->pNext;
2909 iterator& operator--()
2911 if(m_pItem != VMA_NULL)
2913 m_pItem = m_pItem->pPrev;
2917 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2918 m_pItem = m_pList->Back();
2923 iterator operator++(
int)
2925 iterator result = *
this;
2929 iterator operator--(
int)
2931 iterator result = *
this;
2936 bool operator==(
const iterator& rhs)
const 2938 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2939 return m_pItem == rhs.m_pItem;
2941 bool operator!=(
const iterator& rhs)
const 2943 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2944 return m_pItem != rhs.m_pItem;
2948 VmaRawList<T>* m_pList;
2949 VmaListItem<T>* m_pItem;
2951 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2957 friend class VmaList<T, AllocatorT>;
2960 class const_iterator
2969 const_iterator(
const iterator& src) :
2970 m_pList(src.m_pList),
2971 m_pItem(src.m_pItem)
2975 const T& operator*()
const 2977 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2978 return m_pItem->Value;
2980 const T* operator->()
const 2982 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2983 return &m_pItem->Value;
2986 const_iterator& operator++()
2988 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2989 m_pItem = m_pItem->pNext;
2992 const_iterator& operator--()
2994 if(m_pItem != VMA_NULL)
2996 m_pItem = m_pItem->pPrev;
3000 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3001 m_pItem = m_pList->Back();
3006 const_iterator operator++(
int)
3008 const_iterator result = *
this;
3012 const_iterator operator--(
int)
3014 const_iterator result = *
this;
3019 bool operator==(
const const_iterator& rhs)
const 3021 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3022 return m_pItem == rhs.m_pItem;
3024 bool operator!=(
const const_iterator& rhs)
const 3026 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3027 return m_pItem != rhs.m_pItem;
3031 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3037 const VmaRawList<T>* m_pList;
3038 const VmaListItem<T>* m_pItem;
3040 friend class VmaList<T, AllocatorT>;
3043 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3045 bool empty()
const {
return m_RawList.IsEmpty(); }
3046 size_t size()
const {
return m_RawList.GetCount(); }
3048 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3049 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3051 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3052 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3054 void clear() { m_RawList.Clear(); }
3055 void push_back(
const T& value) { m_RawList.PushBack(value); }
3056 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3057 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3060 VmaRawList<T> m_RawList;
3063 #endif // #if VMA_USE_STL_LIST 3071 #if VMA_USE_STL_UNORDERED_MAP 3073 #define VmaPair std::pair 3075 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3076 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3078 #else // #if VMA_USE_STL_UNORDERED_MAP 3080 template<
typename T1,
typename T2>
3086 VmaPair() : first(), second() { }
3087 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3093 template<
typename KeyT,
typename ValueT>
3097 typedef VmaPair<KeyT, ValueT> PairType;
3098 typedef PairType* iterator;
3100 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3102 iterator begin() {
return m_Vector.begin(); }
3103 iterator end() {
return m_Vector.end(); }
3105 void insert(
const PairType& pair);
3106 iterator find(
const KeyT& key);
3107 void erase(iterator it);
3110 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3113 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3115 template<
typename FirstT,
typename SecondT>
3116 struct VmaPairFirstLess
3118 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3120 return lhs.first < rhs.first;
3122 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3124 return lhs.first < rhsFirst;
3128 template<
typename KeyT,
typename ValueT>
3129 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3131 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3133 m_Vector.data() + m_Vector.size(),
3135 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3136 VmaVectorInsert(m_Vector, indexToInsert, pair);
3139 template<
typename KeyT,
typename ValueT>
3140 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3142 PairType* it = VmaBinaryFindFirstNotLess(
3144 m_Vector.data() + m_Vector.size(),
3146 VmaPairFirstLess<KeyT, ValueT>());
3147 if((it != m_Vector.end()) && (it->first == key))
3153 return m_Vector.end();
3157 template<
typename KeyT,
typename ValueT>
3158 void VmaMap<KeyT, ValueT>::erase(iterator it)
3160 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3163 #endif // #if VMA_USE_STL_UNORDERED_MAP 3169 class VmaDeviceMemoryBlock;
3171 struct VmaAllocation_T
3174 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3178 FLAG_USER_DATA_STRING = 0x01,
3182 enum ALLOCATION_TYPE
3184 ALLOCATION_TYPE_NONE,
3185 ALLOCATION_TYPE_BLOCK,
3186 ALLOCATION_TYPE_DEDICATED,
3189 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3192 m_pUserData(VMA_NULL),
3193 m_LastUseFrameIndex(currentFrameIndex),
3194 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3195 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3197 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3203 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3206 VMA_ASSERT(m_pUserData == VMA_NULL);
3209 void InitBlockAllocation(
3211 VmaDeviceMemoryBlock* block,
3212 VkDeviceSize offset,
3213 VkDeviceSize alignment,
3215 VmaSuballocationType suballocationType,
3219 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3220 VMA_ASSERT(block != VMA_NULL);
3221 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3222 m_Alignment = alignment;
3224 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3225 m_SuballocationType = (uint8_t)suballocationType;
3226 m_BlockAllocation.m_hPool = hPool;
3227 m_BlockAllocation.m_Block = block;
3228 m_BlockAllocation.m_Offset = offset;
3229 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3234 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3235 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3236 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3237 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3238 m_BlockAllocation.m_Block = VMA_NULL;
3239 m_BlockAllocation.m_Offset = 0;
3240 m_BlockAllocation.m_CanBecomeLost =
true;
3243 void ChangeBlockAllocation(
3244 VmaDeviceMemoryBlock* block,
3245 VkDeviceSize offset)
3247 VMA_ASSERT(block != VMA_NULL);
3248 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3249 m_BlockAllocation.m_Block = block;
3250 m_BlockAllocation.m_Offset = offset;
3254 void InitDedicatedAllocation(
3255 uint32_t memoryTypeIndex,
3256 VkDeviceMemory hMemory,
3257 VmaSuballocationType suballocationType,
3261 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3262 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3263 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3266 m_SuballocationType = (uint8_t)suballocationType;
3267 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3268 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3269 m_DedicatedAllocation.m_hMemory = hMemory;
3270 m_DedicatedAllocation.m_pMappedData = pMappedData;
3273 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3274 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3275 VkDeviceSize GetSize()
const {
return m_Size; }
3276 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3277 void* GetUserData()
const {
return m_pUserData; }
3278 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3279 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3281 VmaDeviceMemoryBlock* GetBlock()
const 3283 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3284 return m_BlockAllocation.m_Block;
3286 VkDeviceSize GetOffset()
const;
3287 VkDeviceMemory GetMemory()
const;
3288 uint32_t GetMemoryTypeIndex()
const;
3289 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3290 void* GetMappedData()
const;
3291 bool CanBecomeLost()
const;
3292 VmaPool GetPool()
const;
3294 uint32_t GetLastUseFrameIndex()
const 3296 return m_LastUseFrameIndex.load();
3298 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3300 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3310 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3312 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3314 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3325 void BlockAllocMap();
3326 void BlockAllocUnmap();
3327 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3328 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3331 VkDeviceSize m_Alignment;
3332 VkDeviceSize m_Size;
3334 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3336 uint8_t m_SuballocationType;
3343 struct BlockAllocation
3346 VmaDeviceMemoryBlock* m_Block;
3347 VkDeviceSize m_Offset;
3348 bool m_CanBecomeLost;
3352 struct DedicatedAllocation
3354 uint32_t m_MemoryTypeIndex;
3355 VkDeviceMemory m_hMemory;
3356 void* m_pMappedData;
3362 BlockAllocation m_BlockAllocation;
3364 DedicatedAllocation m_DedicatedAllocation;
3367 void FreeUserDataString(VmaAllocator hAllocator);
3374 struct VmaSuballocation
3376 VkDeviceSize offset;
3378 VmaAllocation hAllocation;
3379 VmaSuballocationType type;
3382 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3385 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3400 struct VmaAllocationRequest
3402 VkDeviceSize offset;
3403 VkDeviceSize sumFreeSize;
3404 VkDeviceSize sumItemSize;
3405 VmaSuballocationList::iterator item;
3406 size_t itemsToMakeLostCount;
3408 VkDeviceSize CalcCost()
const 3410 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3418 class VmaBlockMetadata
3421 VmaBlockMetadata(VmaAllocator hAllocator);
3422 ~VmaBlockMetadata();
3423 void Init(VkDeviceSize size);
3426 bool Validate()
const;
3427 VkDeviceSize GetSize()
const {
return m_Size; }
3428 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3429 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3430 VkDeviceSize GetUnusedRangeSizeMax()
const;
3432 bool IsEmpty()
const;
3434 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3437 #if VMA_STATS_STRING_ENABLED 3438 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3442 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3447 bool CreateAllocationRequest(
3448 uint32_t currentFrameIndex,
3449 uint32_t frameInUseCount,
3450 VkDeviceSize bufferImageGranularity,
3451 VkDeviceSize allocSize,
3452 VkDeviceSize allocAlignment,
3453 VmaSuballocationType allocType,
3454 bool canMakeOtherLost,
3455 VmaAllocationRequest* pAllocationRequest);
3457 bool MakeRequestedAllocationsLost(
3458 uint32_t currentFrameIndex,
3459 uint32_t frameInUseCount,
3460 VmaAllocationRequest* pAllocationRequest);
3462 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3466 const VmaAllocationRequest& request,
3467 VmaSuballocationType type,
3468 VkDeviceSize allocSize,
3469 VmaAllocation hAllocation);
3472 void Free(
const VmaAllocation allocation);
3475 VkDeviceSize m_Size;
3476 uint32_t m_FreeCount;
3477 VkDeviceSize m_SumFreeSize;
3478 VmaSuballocationList m_Suballocations;
3481 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3483 bool ValidateFreeSuballocationList()
const;
3487 bool CheckAllocation(
3488 uint32_t currentFrameIndex,
3489 uint32_t frameInUseCount,
3490 VkDeviceSize bufferImageGranularity,
3491 VkDeviceSize allocSize,
3492 VkDeviceSize allocAlignment,
3493 VmaSuballocationType allocType,
3494 VmaSuballocationList::const_iterator suballocItem,
3495 bool canMakeOtherLost,
3496 VkDeviceSize* pOffset,
3497 size_t* itemsToMakeLostCount,
3498 VkDeviceSize* pSumFreeSize,
3499 VkDeviceSize* pSumItemSize)
const;
3501 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3505 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3508 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3511 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3515 class VmaDeviceMemoryMapping
3518 VmaDeviceMemoryMapping();
3519 ~VmaDeviceMemoryMapping();
3521 void* GetMappedData()
const {
return m_pMappedData; }
3524 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData);
3525 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
3529 uint32_t m_MapCount;
3530 void* m_pMappedData;
3539 class VmaDeviceMemoryBlock
3542 uint32_t m_MemoryTypeIndex;
3543 VkDeviceMemory m_hMemory;
3544 VmaDeviceMemoryMapping m_Mapping;
3545 VmaBlockMetadata m_Metadata;
3547 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3549 ~VmaDeviceMemoryBlock()
3551 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3556 uint32_t newMemoryTypeIndex,
3557 VkDeviceMemory newMemory,
3558 VkDeviceSize newSize);
3560 void Destroy(VmaAllocator allocator);
3563 bool Validate()
const;
3566 VkResult Map(VmaAllocator hAllocator,
void** ppData);
3567 void Unmap(VmaAllocator hAllocator);
3570 struct VmaPointerLess
3572 bool operator()(
const void* lhs,
const void* rhs)
const 3578 class VmaDefragmentator;
3586 struct VmaBlockVector
3589 VmaAllocator hAllocator,
3590 uint32_t memoryTypeIndex,
3591 VkDeviceSize preferredBlockSize,
3592 size_t minBlockCount,
3593 size_t maxBlockCount,
3594 VkDeviceSize bufferImageGranularity,
3595 uint32_t frameInUseCount,
3599 VkResult CreateMinBlocks();
3601 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3602 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3603 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3604 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3608 bool IsEmpty()
const {
return m_Blocks.empty(); }
3611 VmaPool hCurrentPool,
3612 uint32_t currentFrameIndex,
3613 const VkMemoryRequirements& vkMemReq,
3615 VmaSuballocationType suballocType,
3616 VmaAllocation* pAllocation);
3619 VmaAllocation hAllocation);
3624 #if VMA_STATS_STRING_ENABLED 3625 void PrintDetailedMap(
class VmaJsonWriter& json);
3628 void MakePoolAllocationsLost(
3629 uint32_t currentFrameIndex,
3630 size_t* pLostAllocationCount);
3632 VmaDefragmentator* EnsureDefragmentator(
3633 VmaAllocator hAllocator,
3634 uint32_t currentFrameIndex);
3636 VkResult Defragment(
3638 VkDeviceSize& maxBytesToMove,
3639 uint32_t& maxAllocationsToMove);
3641 void DestroyDefragmentator();
3644 friend class VmaDefragmentator;
3646 const VmaAllocator m_hAllocator;
3647 const uint32_t m_MemoryTypeIndex;
3648 const VkDeviceSize m_PreferredBlockSize;
3649 const size_t m_MinBlockCount;
3650 const size_t m_MaxBlockCount;
3651 const VkDeviceSize m_BufferImageGranularity;
3652 const uint32_t m_FrameInUseCount;
3653 const bool m_IsCustomPool;
3656 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3660 bool m_HasEmptyBlock;
3661 VmaDefragmentator* m_pDefragmentator;
3664 void Remove(VmaDeviceMemoryBlock* pBlock);
3668 void IncrementallySortBlocks();
3670 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3676 VmaBlockVector m_BlockVector;
3680 VmaAllocator hAllocator,
3684 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3686 #if VMA_STATS_STRING_ENABLED 3691 class VmaDefragmentator
3693 const VmaAllocator m_hAllocator;
3694 VmaBlockVector*
const m_pBlockVector;
3695 uint32_t m_CurrentFrameIndex;
3696 VkDeviceSize m_BytesMoved;
3697 uint32_t m_AllocationsMoved;
3699 struct AllocationInfo
3701 VmaAllocation m_hAllocation;
3702 VkBool32* m_pChanged;
3705 m_hAllocation(VK_NULL_HANDLE),
3706 m_pChanged(VMA_NULL)
3711 struct AllocationInfoSizeGreater
3713 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3715 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3720 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3724 VmaDeviceMemoryBlock* m_pBlock;
3725 bool m_HasNonMovableAllocations;
3726 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3728 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3730 m_HasNonMovableAllocations(true),
3731 m_Allocations(pAllocationCallbacks),
3732 m_pMappedDataForDefragmentation(VMA_NULL)
3736 void CalcHasNonMovableAllocations()
3738 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3739 const size_t defragmentAllocCount = m_Allocations.size();
3740 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3743 void SortAllocationsBySizeDescecnding()
3745 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3748 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3749 void Unmap(VmaAllocator hAllocator);
3753 void* m_pMappedDataForDefragmentation;
3756 struct BlockPointerLess
3758 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3760 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3762 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3764 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3770 struct BlockInfoCompareMoveDestination
3772 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3774 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3778 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3782 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3790 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3791 BlockInfoVector m_Blocks;
3793 VkResult DefragmentRound(
3794 VkDeviceSize maxBytesToMove,
3795 uint32_t maxAllocationsToMove);
3797 static bool MoveMakesSense(
3798 size_t dstBlockIndex, VkDeviceSize dstOffset,
3799 size_t srcBlockIndex, VkDeviceSize srcOffset);
3803 VmaAllocator hAllocator,
3804 VmaBlockVector* pBlockVector,
3805 uint32_t currentFrameIndex);
3807 ~VmaDefragmentator();
3809 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3810 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3812 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3814 VkResult Defragment(
3815 VkDeviceSize maxBytesToMove,
3816 uint32_t maxAllocationsToMove);
3820 struct VmaAllocator_T
3823 bool m_UseKhrDedicatedAllocation;
3825 bool m_AllocationCallbacksSpecified;
3826 VkAllocationCallbacks m_AllocationCallbacks;
3830 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3831 VMA_MUTEX m_HeapSizeLimitMutex;
3833 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3834 VkPhysicalDeviceMemoryProperties m_MemProps;
3837 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3840 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3841 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3842 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3847 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3849 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3853 return m_VulkanFunctions;
3856 VkDeviceSize GetBufferImageGranularity()
const 3859 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3860 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3863 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3864 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3866 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3868 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3869 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3872 void GetBufferMemoryRequirements(
3874 VkMemoryRequirements& memReq,
3875 bool& requiresDedicatedAllocation,
3876 bool& prefersDedicatedAllocation)
const;
3877 void GetImageMemoryRequirements(
3879 VkMemoryRequirements& memReq,
3880 bool& requiresDedicatedAllocation,
3881 bool& prefersDedicatedAllocation)
const;
3884 VkResult AllocateMemory(
3885 const VkMemoryRequirements& vkMemReq,
3886 bool requiresDedicatedAllocation,
3887 bool prefersDedicatedAllocation,
3888 VkBuffer dedicatedBuffer,
3889 VkImage dedicatedImage,
3891 VmaSuballocationType suballocType,
3892 VmaAllocation* pAllocation);
3895 void FreeMemory(
const VmaAllocation allocation);
3897 void CalculateStats(
VmaStats* pStats);
3899 #if VMA_STATS_STRING_ENABLED 3900 void PrintDetailedMap(
class VmaJsonWriter& json);
3903 VkResult Defragment(
3904 VmaAllocation* pAllocations,
3905 size_t allocationCount,
3906 VkBool32* pAllocationsChanged,
3910 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3913 void DestroyPool(VmaPool pool);
3914 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3916 void SetCurrentFrameIndex(uint32_t frameIndex);
3918 void MakePoolAllocationsLost(
3920 size_t* pLostAllocationCount);
3922 void CreateLostAllocation(VmaAllocation* pAllocation);
3924 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3925 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3927 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3928 void Unmap(VmaAllocation hAllocation);
3931 VkDeviceSize m_PreferredLargeHeapBlockSize;
3932 VkDeviceSize m_PreferredSmallHeapBlockSize;
3934 VkPhysicalDevice m_PhysicalDevice;
3935 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3937 VMA_MUTEX m_PoolsMutex;
3939 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3945 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3947 VkResult AllocateMemoryOfType(
3948 const VkMemoryRequirements& vkMemReq,
3949 bool dedicatedAllocation,
3950 VkBuffer dedicatedBuffer,
3951 VkImage dedicatedImage,
3953 uint32_t memTypeIndex,
3954 VmaSuballocationType suballocType,
3955 VmaAllocation* pAllocation);
3958 VkResult AllocateDedicatedMemory(
3960 VmaSuballocationType suballocType,
3961 uint32_t memTypeIndex,
3963 bool isUserDataString,
3965 VkBuffer dedicatedBuffer,
3966 VkImage dedicatedImage,
3967 VmaAllocation* pAllocation);
3970 void FreeDedicatedMemory(VmaAllocation allocation);
3976 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3978 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3981 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3983 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
3986 template<
typename T>
3987 static T* VmaAllocate(VmaAllocator hAllocator)
3989 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3992 template<
typename T>
3993 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3995 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
3998 template<
typename T>
3999 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4004 VmaFree(hAllocator, ptr);
4008 template<
typename T>
4009 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4013 for(
size_t i = count; i--; )
4015 VmaFree(hAllocator, ptr);
4022 #if VMA_STATS_STRING_ENABLED 4024 class VmaStringBuilder
4027 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4028 size_t GetLength()
const {
return m_Data.size(); }
4029 const char* GetData()
const {
return m_Data.data(); }
4031 void Add(
char ch) { m_Data.push_back(ch); }
4032 void Add(
const char* pStr);
4033 void AddNewLine() { Add(
'\n'); }
4034 void AddNumber(uint32_t num);
4035 void AddNumber(uint64_t num);
4036 void AddPointer(
const void* ptr);
4039 VmaVector< char, VmaStlAllocator<char> > m_Data;
4042 void VmaStringBuilder::Add(
const char* pStr)
4044 const size_t strLen = strlen(pStr);
4047 const size_t oldCount = m_Data.size();
4048 m_Data.resize(oldCount + strLen);
4049 memcpy(m_Data.data() + oldCount, pStr, strLen);
4053 void VmaStringBuilder::AddNumber(uint32_t num)
4056 VmaUint32ToStr(buf,
sizeof(buf), num);
4060 void VmaStringBuilder::AddNumber(uint64_t num)
4063 VmaUint64ToStr(buf,
sizeof(buf), num);
4067 void VmaStringBuilder::AddPointer(
const void* ptr)
4070 VmaPtrToStr(buf,
sizeof(buf), ptr);
4074 #endif // #if VMA_STATS_STRING_ENABLED 4079 #if VMA_STATS_STRING_ENABLED 4084 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4087 void BeginObject(
bool singleLine =
false);
4090 void BeginArray(
bool singleLine =
false);
4093 void WriteString(
const char* pStr);
4094 void BeginString(
const char* pStr = VMA_NULL);
4095 void ContinueString(
const char* pStr);
4096 void ContinueString(uint32_t n);
4097 void ContinueString(uint64_t n);
4098 void ContinueString_Pointer(
const void* ptr);
4099 void EndString(
const char* pStr = VMA_NULL);
4101 void WriteNumber(uint32_t n);
4102 void WriteNumber(uint64_t n);
4103 void WriteBool(
bool b);
4107 static const char*
const INDENT;
4109 enum COLLECTION_TYPE
4111 COLLECTION_TYPE_OBJECT,
4112 COLLECTION_TYPE_ARRAY,
4116 COLLECTION_TYPE type;
4117 uint32_t valueCount;
4118 bool singleLineMode;
4121 VmaStringBuilder& m_SB;
4122 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4123 bool m_InsideString;
4125 void BeginValue(
bool isString);
4126 void WriteIndent(
bool oneLess =
false);
4129 const char*
const VmaJsonWriter::INDENT =
" ";
4131 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4133 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4134 m_InsideString(false)
4138 VmaJsonWriter::~VmaJsonWriter()
4140 VMA_ASSERT(!m_InsideString);
4141 VMA_ASSERT(m_Stack.empty());
4144 void VmaJsonWriter::BeginObject(
bool singleLine)
4146 VMA_ASSERT(!m_InsideString);
4152 item.type = COLLECTION_TYPE_OBJECT;
4153 item.valueCount = 0;
4154 item.singleLineMode = singleLine;
4155 m_Stack.push_back(item);
4158 void VmaJsonWriter::EndObject()
4160 VMA_ASSERT(!m_InsideString);
4165 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4169 void VmaJsonWriter::BeginArray(
bool singleLine)
4171 VMA_ASSERT(!m_InsideString);
4177 item.type = COLLECTION_TYPE_ARRAY;
4178 item.valueCount = 0;
4179 item.singleLineMode = singleLine;
4180 m_Stack.push_back(item);
4183 void VmaJsonWriter::EndArray()
4185 VMA_ASSERT(!m_InsideString);
4190 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4194 void VmaJsonWriter::WriteString(
const char* pStr)
4200 void VmaJsonWriter::BeginString(
const char* pStr)
4202 VMA_ASSERT(!m_InsideString);
4206 m_InsideString =
true;
4207 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4209 ContinueString(pStr);
4213 void VmaJsonWriter::ContinueString(
const char* pStr)
4215 VMA_ASSERT(m_InsideString);
4217 const size_t strLen = strlen(pStr);
4218 for(
size_t i = 0; i < strLen; ++i)
4251 VMA_ASSERT(0 &&
"Character not currently supported.");
4257 void VmaJsonWriter::ContinueString(uint32_t n)
4259 VMA_ASSERT(m_InsideString);
4263 void VmaJsonWriter::ContinueString(uint64_t n)
4265 VMA_ASSERT(m_InsideString);
4269 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4271 VMA_ASSERT(m_InsideString);
4272 m_SB.AddPointer(ptr);
4275 void VmaJsonWriter::EndString(
const char* pStr)
4277 VMA_ASSERT(m_InsideString);
4278 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4280 ContinueString(pStr);
4283 m_InsideString =
false;
4286 void VmaJsonWriter::WriteNumber(uint32_t n)
4288 VMA_ASSERT(!m_InsideString);
4293 void VmaJsonWriter::WriteNumber(uint64_t n)
4295 VMA_ASSERT(!m_InsideString);
4300 void VmaJsonWriter::WriteBool(
bool b)
4302 VMA_ASSERT(!m_InsideString);
4304 m_SB.Add(b ?
"true" :
"false");
4307 void VmaJsonWriter::WriteNull()
4309 VMA_ASSERT(!m_InsideString);
4314 void VmaJsonWriter::BeginValue(
bool isString)
4316 if(!m_Stack.empty())
4318 StackItem& currItem = m_Stack.back();
4319 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4320 currItem.valueCount % 2 == 0)
4322 VMA_ASSERT(isString);
4325 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4326 currItem.valueCount % 2 != 0)
4330 else if(currItem.valueCount > 0)
4339 ++currItem.valueCount;
4343 void VmaJsonWriter::WriteIndent(
bool oneLess)
4345 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4349 size_t count = m_Stack.size();
4350 if(count > 0 && oneLess)
4354 for(
size_t i = 0; i < count; ++i)
4361 #endif // #if VMA_STATS_STRING_ENABLED 4365 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4367 if(IsUserDataString())
4369 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4371 FreeUserDataString(hAllocator);
4373 if(pUserData != VMA_NULL)
4375 const char*
const newStrSrc = (
char*)pUserData;
4376 const size_t newStrLen = strlen(newStrSrc);
4377 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4378 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4379 m_pUserData = newStrDst;
4384 m_pUserData = pUserData;
4388 VkDeviceSize VmaAllocation_T::GetOffset()
const 4392 case ALLOCATION_TYPE_BLOCK:
4393 return m_BlockAllocation.m_Offset;
4394 case ALLOCATION_TYPE_DEDICATED:
4402 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4406 case ALLOCATION_TYPE_BLOCK:
4407 return m_BlockAllocation.m_Block->m_hMemory;
4408 case ALLOCATION_TYPE_DEDICATED:
4409 return m_DedicatedAllocation.m_hMemory;
4412 return VK_NULL_HANDLE;
4416 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4420 case ALLOCATION_TYPE_BLOCK:
4421 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4422 case ALLOCATION_TYPE_DEDICATED:
4423 return m_DedicatedAllocation.m_MemoryTypeIndex;
4430 void* VmaAllocation_T::GetMappedData()
const 4434 case ALLOCATION_TYPE_BLOCK:
4437 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4438 VMA_ASSERT(pBlockData != VMA_NULL);
4439 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4446 case ALLOCATION_TYPE_DEDICATED:
4447 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4448 return m_DedicatedAllocation.m_pMappedData;
4455 bool VmaAllocation_T::CanBecomeLost()
const 4459 case ALLOCATION_TYPE_BLOCK:
4460 return m_BlockAllocation.m_CanBecomeLost;
4461 case ALLOCATION_TYPE_DEDICATED:
4469 VmaPool VmaAllocation_T::GetPool()
const 4471 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4472 return m_BlockAllocation.m_hPool;
4475 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4477 VMA_ASSERT(CanBecomeLost());
4483 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4486 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4491 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4497 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4507 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4509 VMA_ASSERT(IsUserDataString());
4510 if(m_pUserData != VMA_NULL)
4512 char*
const oldStr = (
char*)m_pUserData;
4513 const size_t oldStrLen = strlen(oldStr);
4514 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4515 m_pUserData = VMA_NULL;
4519 void VmaAllocation_T::BlockAllocMap()
4521 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4523 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4529 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4533 void VmaAllocation_T::BlockAllocUnmap()
4535 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4537 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4543 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4547 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4549 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4553 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4555 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4556 *ppData = m_DedicatedAllocation.m_pMappedData;
4562 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4563 return VK_ERROR_MEMORY_MAP_FAILED;
4568 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4569 hAllocator->m_hDevice,
4570 m_DedicatedAllocation.m_hMemory,
4575 if(result == VK_SUCCESS)
4577 m_DedicatedAllocation.m_pMappedData = *ppData;
4584 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4586 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4588 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4593 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4594 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4595 hAllocator->m_hDevice,
4596 m_DedicatedAllocation.m_hMemory);
4601 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4605 #if VMA_STATS_STRING_ENABLED 4608 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4617 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4621 json.WriteString(
"Blocks");
4624 json.WriteString(
"Allocations");
4627 json.WriteString(
"UnusedRanges");
4630 json.WriteString(
"UsedBytes");
4633 json.WriteString(
"UnusedBytes");
4638 json.WriteString(
"AllocationSize");
4639 json.BeginObject(
true);
4640 json.WriteString(
"Min");
4642 json.WriteString(
"Avg");
4644 json.WriteString(
"Max");
4651 json.WriteString(
"UnusedRangeSize");
4652 json.BeginObject(
true);
4653 json.WriteString(
"Min");
4655 json.WriteString(
"Avg");
4657 json.WriteString(
"Max");
4665 #endif // #if VMA_STATS_STRING_ENABLED 4667 struct VmaSuballocationItemSizeLess
4670 const VmaSuballocationList::iterator lhs,
4671 const VmaSuballocationList::iterator rhs)
const 4673 return lhs->size < rhs->size;
4676 const VmaSuballocationList::iterator lhs,
4677 VkDeviceSize rhsSize)
const 4679 return lhs->size < rhsSize;
4686 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4690 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4691 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4695 VmaBlockMetadata::~VmaBlockMetadata()
4699 void VmaBlockMetadata::Init(VkDeviceSize size)
4703 m_SumFreeSize = size;
4705 VmaSuballocation suballoc = {};
4706 suballoc.offset = 0;
4707 suballoc.size = size;
4708 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4709 suballoc.hAllocation = VK_NULL_HANDLE;
4711 m_Suballocations.push_back(suballoc);
4712 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4714 m_FreeSuballocationsBySize.push_back(suballocItem);
4717 bool VmaBlockMetadata::Validate()
const 4719 if(m_Suballocations.empty())
4725 VkDeviceSize calculatedOffset = 0;
4727 uint32_t calculatedFreeCount = 0;
4729 VkDeviceSize calculatedSumFreeSize = 0;
4732 size_t freeSuballocationsToRegister = 0;
4734 bool prevFree =
false;
4736 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4737 suballocItem != m_Suballocations.cend();
4740 const VmaSuballocation& subAlloc = *suballocItem;
4743 if(subAlloc.offset != calculatedOffset)
4748 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4750 if(prevFree && currFree)
4754 prevFree = currFree;
4756 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4763 calculatedSumFreeSize += subAlloc.size;
4764 ++calculatedFreeCount;
4765 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4767 ++freeSuballocationsToRegister;
4771 calculatedOffset += subAlloc.size;
4776 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4781 VkDeviceSize lastSize = 0;
4782 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4784 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4787 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4792 if(suballocItem->size < lastSize)
4797 lastSize = suballocItem->size;
4802 ValidateFreeSuballocationList() &&
4803 (calculatedOffset == m_Size) &&
4804 (calculatedSumFreeSize == m_SumFreeSize) &&
4805 (calculatedFreeCount == m_FreeCount);
4808 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4810 if(!m_FreeSuballocationsBySize.empty())
4812 return m_FreeSuballocationsBySize.back()->size;
4820 bool VmaBlockMetadata::IsEmpty()
const 4822 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4825 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4829 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4841 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4842 suballocItem != m_Suballocations.cend();
4845 const VmaSuballocation& suballoc = *suballocItem;
4846 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4859 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4861 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4863 inoutStats.
size += m_Size;
4870 #if VMA_STATS_STRING_ENABLED 4872 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4876 json.WriteString(
"TotalBytes");
4877 json.WriteNumber(m_Size);
4879 json.WriteString(
"UnusedBytes");
4880 json.WriteNumber(m_SumFreeSize);
4882 json.WriteString(
"Allocations");
4883 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4885 json.WriteString(
"UnusedRanges");
4886 json.WriteNumber(m_FreeCount);
4888 json.WriteString(
"Suballocations");
4891 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4892 suballocItem != m_Suballocations.cend();
4893 ++suballocItem, ++i)
4895 json.BeginObject(
true);
4897 json.WriteString(
"Type");
4898 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4900 json.WriteString(
"Size");
4901 json.WriteNumber(suballocItem->size);
4903 json.WriteString(
"Offset");
4904 json.WriteNumber(suballocItem->offset);
4906 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4908 const void* pUserData = suballocItem->hAllocation->GetUserData();
4909 if(pUserData != VMA_NULL)
4911 json.WriteString(
"UserData");
4912 if(suballocItem->hAllocation->IsUserDataString())
4914 json.WriteString((
const char*)pUserData);
4919 json.ContinueString_Pointer(pUserData);
4932 #endif // #if VMA_STATS_STRING_ENABLED 4944 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4946 VMA_ASSERT(IsEmpty());
4947 pAllocationRequest->offset = 0;
4948 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4949 pAllocationRequest->sumItemSize = 0;
4950 pAllocationRequest->item = m_Suballocations.begin();
4951 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for a new allocation of the given size/alignment/type
// inside this block, filling *pAllocationRequest on success.
// Strategy: first search the free suballocations (sorted by size) via binary
// search; if canMakeOtherLost is set, additionally consider positions that would
// require making other (lost-capable) allocations lost, picking the cheapest by
// CalcCost(). Returns whether a usable request was produced.
4954 bool VmaBlockMetadata::CreateAllocationRequest(
4955 uint32_t currentFrameIndex,
4956 uint32_t frameInUseCount,
4957 VkDeviceSize bufferImageGranularity,
4958 VkDeviceSize allocSize,
4959 VkDeviceSize allocAlignment,
4960 VmaSuballocationType allocType,
4961 bool canMakeOtherLost,
4962 VmaAllocationRequest* pAllocationRequest)
4964 VMA_ASSERT(allocSize > 0);
4965 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4966 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4967 VMA_HEAVY_ASSERT(Validate());
// Early out: without the make-lost option, total free space must cover allocSize.
4970 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4976 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4977 if(freeSuballocCount > 0)
// Binary-search the size-sorted free list for the first suballocation not smaller
// than the needed size, then scan forward checking alignment/granularity via
// CheckAllocation (best-fit direction).
4982 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4983 m_FreeSuballocationsBySize.data(),
4984 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4986 VmaSuballocationItemSizeLess());
4987 size_t index = it - m_FreeSuballocationsBySize.data();
4988 for(; index < freeSuballocCount; ++index)
4993 bufferImageGranularity,
4997 m_FreeSuballocationsBySize[index],
4999 &pAllocationRequest->offset,
5000 &pAllocationRequest->itemsToMakeLostCount,
5001 &pAllocationRequest->sumFreeSize,
5002 &pAllocationRequest->sumItemSize))
5004 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Fallback path: scan the free list from largest to smallest.
5012 for(
size_t index = freeSuballocCount; index--; )
5017 bufferImageGranularity,
5021 m_FreeSuballocationsBySize[index],
5023 &pAllocationRequest->offset,
5024 &pAllocationRequest->itemsToMakeLostCount,
5025 &pAllocationRequest->sumFreeSize,
5026 &pAllocationRequest->sumItemSize))
5028 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5035 if(canMakeOtherLost)
// Brute-force over all suballocations, considering candidates that are free or
// whose allocation can become lost; keep the request with the lowest CalcCost().
// VK_WHOLE_SIZE is used as the "no candidate yet" sentinel cost.
5039 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5040 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5042 VmaAllocationRequest tmpAllocRequest = {};
5043 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5044 suballocIt != m_Suballocations.end();
5047 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5048 suballocIt->hAllocation->CanBecomeLost())
5053 bufferImageGranularity,
5059 &tmpAllocRequest.offset,
5060 &tmpAllocRequest.itemsToMakeLostCount,
5061 &tmpAllocRequest.sumFreeSize,
5062 &tmpAllocRequest.sumItemSize))
5064 tmpAllocRequest.item = suballocIt;
5066 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5068 *pAllocationRequest = tmpAllocRequest;
// A candidate was found iff sumItemSize was overwritten from the sentinel.
5074 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Makes lost the allocations that pAllocationRequest counted as
// itemsToMakeLostCount, walking forward from the request's item and freeing each
// one via FreeSuballocation. Free items are skipped. Returns success; on exit the
// request's item must again denote a free suballocation (asserted).
5083 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5084 uint32_t currentFrameIndex,
5085 uint32_t frameInUseCount,
5086 VmaAllocationRequest* pAllocationRequest)
5088 while(pAllocationRequest->itemsToMakeLostCount > 0)
5090 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5092 ++pAllocationRequest->item;
5094 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5095 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5096 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5097 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors; it returns the resulting iterator.
5099 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5100 --pAllocationRequest->itemsToMakeLostCount;
5108 VMA_HEAVY_ASSERT(Validate());
5109 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5110 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Walks every suballocation and makes lost each non-free allocation that can
// become lost (per currentFrameIndex/frameInUseCount), freeing its suballocation.
// Returns how many allocations were made lost.
5115 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5117 uint32_t lostAllocationCount = 0;
5118 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5119 it != m_Suballocations.end();
5122 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5123 it->hAllocation->CanBecomeLost() &&
5124 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns a valid iterator after possible neighbor merges.
5126 it = FreeSuballocation(it);
5127 ++lostAllocationCount;
5130 return lostAllocationCount;
// Commits a previously computed allocation request: carves allocSize bytes out of
// the free suballocation at request.item, creating free "padding" suballocations
// before and/or after the new allocation when the aligned offset or leftover size
// requires it, and updates m_FreeCount / m_SumFreeSize bookkeeping.
5133 void VmaBlockMetadata::Alloc(
5134 const VmaAllocationRequest& request,
5135 VmaSuballocationType type,
5136 VkDeviceSize allocSize,
5137 VmaAllocation hAllocation)
5139 VMA_ASSERT(request.item != m_Suballocations.end());
5140 VmaSuballocation& suballoc = *request.item;
5142 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5144 VMA_ASSERT(request.offset >= suballoc.offset);
5145 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5146 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5147 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free list before being repurposed as the new allocation.
5151 UnregisterFreeSuballocation(request.item);
5153 suballoc.offset = request.offset;
5154 suballoc.size = allocSize;
5155 suballoc.type = type;
5156 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after the item.
5161 VmaSuballocation paddingSuballoc = {};
5162 paddingSuballoc.offset = request.offset + allocSize;
5163 paddingSuballoc.size = paddingEnd;
5164 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5165 VmaSuballocationList::iterator next = request.item;
5167 const VmaSuballocationList::iterator paddingEndItem =
5168 m_Suballocations.insert(next, paddingSuballoc);
5169 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before the item.
5175 VmaSuballocation paddingSuballoc = {};
5176 paddingSuballoc.offset = request.offset - paddingBegin;
5177 paddingSuballoc.size = paddingBegin;
5178 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5179 const VmaSuballocationList::iterator paddingBeginItem =
5180 m_Suballocations.insert(request.item, paddingSuballoc);
5181 RegisterFreeSuballocation(paddingBeginItem);
// One free item was consumed; paddings (if any) adjust the count back up.
5185 m_FreeCount = m_FreeCount - 1;
5186 if(paddingBegin > 0)
5194 m_SumFreeSize -= allocSize;
// Frees the suballocation that holds the given allocation handle by linear search
// over the suballocation list. Asserts if the allocation is not found in this block.
5197 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5199 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5200 suballocItem != m_Suballocations.end();
5203 VmaSuballocation& suballoc = *suballocItem;
5204 if(suballoc.hAllocation == allocation)
5206 FreeSuballocation(suballocItem);
5207 VMA_HEAVY_ASSERT(Validate());
5211 VMA_ASSERT(0 &&
"Not found!");
// Validates invariants of m_FreeSuballocationsBySize: every entry is FREE, at
// least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector is
// sorted by ascending size (lastSize tracks the previous entry).
5214 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5216 VkDeviceSize lastSize = 0;
5217 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5219 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5221 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5226 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5231 if(it->size < lastSize)
5237 lastSize = it->size;
// Core feasibility check: determines whether an allocation of
// allocSize/allocAlignment/allocType can be placed at (or after) suballocItem.
// Outputs the aligned offset, how many existing allocations would have to be made
// lost, and the free/item byte sums used for cost comparison. Two major branches:
// the canMakeOtherLost path (may span multiple suballocations, counting lost-able
// ones) and the plain path (the single free suballocation must suffice).
// bufferImageGranularity conflicts with neighboring suballocations of a different
// resource class force extra alignment or outright rejection.
5242 bool VmaBlockMetadata::CheckAllocation(
5243 uint32_t currentFrameIndex,
5244 uint32_t frameInUseCount,
5245 VkDeviceSize bufferImageGranularity,
5246 VkDeviceSize allocSize,
5247 VkDeviceSize allocAlignment,
5248 VmaSuballocationType allocType,
5249 VmaSuballocationList::const_iterator suballocItem,
5250 bool canMakeOtherLost,
5251 VkDeviceSize* pOffset,
5252 size_t* itemsToMakeLostCount,
5253 VkDeviceSize* pSumFreeSize,
5254 VkDeviceSize* pSumItemSize)
const 5256 VMA_ASSERT(allocSize > 0);
5257 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5258 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5259 VMA_ASSERT(pOffset != VMA_NULL);
5261 *itemsToMakeLostCount = 0;
// ---- Branch 1: placements that may make other allocations lost. ----
5265 if(canMakeOtherLost)
5267 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5269 *pSumFreeSize = suballocItem->size;
// A non-free item only qualifies if its allocation is lost-able and stale
// (last use older than the frame-in-use window).
5273 if(suballocItem->hAllocation->CanBecomeLost() &&
5274 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5276 ++*itemsToMakeLostCount;
5277 *pSumItemSize = suballocItem->size;
5286 if(m_Size - suballocItem->offset < allocSize)
5292 *pOffset = suballocItem->offset;
// Apply debug margin (except at block start) and alignment.
5295 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5297 *pOffset += VMA_DEBUG_MARGIN;
5301 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5302 *pOffset = VmaAlignUp(*pOffset, alignment);
// Check predecessors on the same "page" for a buffer/image granularity conflict;
// if found, bump the offset up to the granularity boundary.
5306 if(bufferImageGranularity > 1)
5308 bool bufferImageGranularityConflict =
false;
5309 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5310 while(prevSuballocItem != m_Suballocations.cbegin())
5313 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5314 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5316 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5318 bufferImageGranularityConflict =
true;
5326 if(bufferImageGranularityConflict)
5328 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5334 if(*pOffset >= suballocItem->offset + suballocItem->size)
5340 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5343 VmaSuballocationList::const_iterator next = suballocItem;
5345 const VkDeviceSize requiredEndMargin =
5346 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5348 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5350 if(suballocItem->offset + totalSize > m_Size)
// Consume following suballocations until totalSize is covered, accumulating
// free sizes and counting lost-able allocations; any unlosable item aborts.
5357 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5358 if(totalSize > suballocItem->size)
5360 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5361 while(remainingSize > 0)
5364 if(lastSuballocItem == m_Suballocations.cend())
5368 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5370 *pSumFreeSize += lastSuballocItem->size;
5374 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5375 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5376 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5378 ++*itemsToMakeLostCount;
5379 *pSumItemSize += lastSuballocItem->size;
5386 remainingSize = (lastSuballocItem->size < remainingSize) ?
5387 remainingSize - lastSuballocItem->size : 0;
// Successors on the same page with a conflicting type must also be lost-able.
5393 if(bufferImageGranularity > 1)
5395 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5397 while(nextSuballocItem != m_Suballocations.cend())
5399 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5400 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5402 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5404 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5405 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5406 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5408 ++*itemsToMakeLostCount;
// ---- Branch 2: plain placement inside a single free suballocation. ----
5427 const VmaSuballocation& suballoc = *suballocItem;
5428 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5430 *pSumFreeSize = suballoc.size;
5433 if(suballoc.size < allocSize)
5439 *pOffset = suballoc.offset;
5442 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5444 *pOffset += VMA_DEBUG_MARGIN;
5448 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5449 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same predecessor granularity-conflict scan as above.
5453 if(bufferImageGranularity > 1)
5455 bool bufferImageGranularityConflict =
false;
5456 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5457 while(prevSuballocItem != m_Suballocations.cbegin())
5460 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5461 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5463 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5465 bufferImageGranularityConflict =
true;
5473 if(bufferImageGranularityConflict)
5475 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5480 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5483 VmaSuballocationList::const_iterator next = suballocItem;
5485 const VkDeviceSize requiredEndMargin =
5486 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if begin padding + payload + end margin does not fit this free item.
5489 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A granularity conflict with any successor on the same page rejects this spot.
5496 if(bufferImageGranularity > 1)
5498 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5500 while(nextSuballocItem != m_Suballocations.cend())
5502 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5503 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5505 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation at 'item' with its (also free) successor:
// the successor's size is folded into 'item' and the successor node is erased.
// Both items must be FREE (asserted).
5524 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5526 VMA_ASSERT(item != m_Suballocations.end());
5527 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5529 VmaSuballocationList::iterator nextItem = item;
5531 VMA_ASSERT(nextItem != m_Suballocations.end());
5532 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5534 item->size += nextItem->size;
5536 m_Suballocations.erase(nextItem);
// Turns the given suballocation into FREE, updates m_SumFreeSize, coalesces with
// free neighbors (next first, then previous), keeps the size-sorted free list in
// sync via Unregister/RegisterFreeSuballocation, and returns an iterator to the
// resulting (possibly merged) free suballocation.
5539 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5542 VmaSuballocation& suballoc = *suballocItem;
5543 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5544 suballoc.hAllocation = VK_NULL_HANDLE;
5548 m_SumFreeSize += suballoc.size;
// Decide merges with adjacent free items before mutating the list.
5551 bool mergeWithNext =
false;
5552 bool mergeWithPrev =
false;
5554 VmaSuballocationList::iterator nextItem = suballocItem;
5556 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5558 mergeWithNext =
true;
5561 VmaSuballocationList::iterator prevItem = suballocItem;
5562 if(suballocItem != m_Suballocations.begin())
5565 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5567 mergeWithPrev =
true;
// Neighbors must leave the size-sorted registry before their sizes change.
5573 UnregisterFreeSuballocation(nextItem);
5574 MergeFreeWithNext(suballocItem);
5579 UnregisterFreeSuballocation(prevItem);
5580 MergeFreeWithNext(prevItem);
5581 RegisterFreeSuballocation(prevItem);
5586 RegisterFreeSuballocation(suballocItem);
5587 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Items smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are intentionally not tracked.
5591 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5593 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5594 VMA_ASSERT(item->size > 0);
5598 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5600 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5602 if(m_FreeSuballocationsBySize.empty())
5604 m_FreeSuballocationsBySize.push_back(item);
5608 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Binary-searches
// to the first entry of equal size, then scans forward over same-sized entries
// until the exact iterator is found; asserts if it is absent. Items below the
// registration threshold were never added and are skipped.
5616 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5618 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5619 VMA_ASSERT(item->size > 0);
5623 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5625 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5627 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5628 m_FreeSuballocationsBySize.data(),
5629 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5631 VmaSuballocationItemSizeLess());
5632 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5633 index < m_FreeSuballocationsBySize.size();
5636 if(m_FreeSuballocationsBySize[index] == item)
5638 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Passing an entry with a different size means we walked past all candidates.
5641 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5643 VMA_ASSERT(0 &&
"Not found.");
// Default constructor: starts with no mapped pointer (map count implicitly zero).
5652 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5654 m_pMappedData(VMA_NULL)
// Destructor: asserts the reference-counted mapping was fully unmapped.
5658 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5660 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory under the internal mutex. If the
// memory is already mapped, reuses the cached pointer; otherwise calls the
// dispatched vkMapMemory and caches the result. ppData may be null when the
// caller only wants to bump the map count.
5663 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData)
5665 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5669 VMA_ASSERT(m_pMappedData != VMA_NULL);
5670 if(ppData != VMA_NULL)
5672 *ppData = m_pMappedData;
5678 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5679 hAllocator->m_hDevice,
5685 if(result == VK_SUCCESS)
5687 if(ppData != VMA_NULL)
5689 *ppData = m_pMappedData;
// Reference-counted unmap: decrements the map count under the mutex and only
// calls vkUnmapMemory when it reaches zero. Asserts on unbalanced unmaps.
5697 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5699 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5702 if(--m_MapCount == 0)
5704 m_pMappedData = VMA_NULL;
5705 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5710 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructor: block starts uninitialized (no memory handle, sentinel type index);
// real setup happens in Init().
5717 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5718 m_MemoryTypeIndex(UINT32_MAX),
5719 m_hMemory(VK_NULL_HANDLE),
5720 m_Metadata(hAllocator)
// Adopts an already-allocated VkDeviceMemory of the given type and size and
// initializes the metadata to one big free range. Must only be called once
// (asserted via m_hMemory == VK_NULL_HANDLE).
5724 void VmaDeviceMemoryBlock::Init(
5725 uint32_t newMemoryTypeIndex,
5726 VkDeviceMemory newMemory,
5727 VkDeviceSize newSize)
5729 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5731 m_MemoryTypeIndex = newMemoryTypeIndex;
5732 m_hMemory = newMemory;
5734 m_Metadata.Init(newSize);
// Releases the underlying VkDeviceMemory back to the allocator. The block must be
// empty (all suballocations freed) — asserted before freeing.
5737 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5741 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5743 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5744 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5745 m_hMemory = VK_NULL_HANDLE;
// Sanity check: the block must own memory and have nonzero size, then defer to
// the metadata's own Validate().
5748 bool VmaDeviceMemoryBlock::Validate()
const 5750 if((m_hMemory == VK_NULL_HANDLE) ||
5751 (m_Metadata.GetSize() == 0))
5756 return m_Metadata.Validate();
// Thin forwarder to the reference-counted mapping helper for this block's memory.
5759 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator,
void** ppData)
5761 return m_Mapping.Map(hAllocator, m_hMemory, ppData);
// Thin forwarder to the reference-counted unmap helper for this block's memory.
5764 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5766 m_Mapping.Unmap(hAllocator, m_hMemory);
// Zero-initializes a VmaStatInfo output structure before accumulation.
5771 memset(&outInfo, 0,
sizeof(outInfo));
// Finalizes a VmaStatInfo after accumulation (e.g. derived averages) —
// NOTE(review): body not fully visible here; confirm against the declaration.
5790 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the VmaPoolCreateInfo fields (memory type, block
// size, min/max block count, frame-in-use count) to the pool's block vector.
5798 VmaPool_T::VmaPool_T(
5799 VmaAllocator hAllocator,
5803 createInfo.memoryTypeIndex,
5804 createInfo.blockSize,
5805 createInfo.minBlockCount,
5806 createInfo.maxBlockCount,
5808 createInfo.frameInUseCount,
5813 VmaPool_T::~VmaPool_T()
// Constructor: stores the configuration for one vector of memory blocks of a
// single memory type (preferred block size, min/max block counts, granularity,
// frame-in-use count, custom-pool flag) and starts with no blocks, no empty-block
// flag, and no defragmentator.
5817 #if VMA_STATS_STRING_ENABLED 5819 #endif // #if VMA_STATS_STRING_ENABLED 5821 VmaBlockVector::VmaBlockVector(
5822 VmaAllocator hAllocator,
5823 uint32_t memoryTypeIndex,
5824 VkDeviceSize preferredBlockSize,
5825 size_t minBlockCount,
5826 size_t maxBlockCount,
5827 VkDeviceSize bufferImageGranularity,
5828 uint32_t frameInUseCount,
5829 bool isCustomPool) :
5830 m_hAllocator(hAllocator),
5831 m_MemoryTypeIndex(memoryTypeIndex),
5832 m_PreferredBlockSize(preferredBlockSize),
5833 m_MinBlockCount(minBlockCount),
5834 m_MaxBlockCount(maxBlockCount),
5835 m_BufferImageGranularity(bufferImageGranularity),
5836 m_FrameInUseCount(frameInUseCount),
5837 m_IsCustomPool(isCustomPool),
5838 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5839 m_HasEmptyBlock(false),
5840 m_pDefragmentator(VMA_NULL)
// Destructor: requires the defragmentator to be destroyed first (asserted), then
// destroys and deletes every owned block in reverse order.
5844 VmaBlockVector::~VmaBlockVector()
5846 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5848 for(
size_t i = m_Blocks.size(); i--; )
5850 m_Blocks[i]->Destroy(m_hAllocator);
5851 vma_delete(m_hAllocator, m_Blocks[i]);
// Eagerly creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure.
5855 VkResult VmaBlockVector::CreateMinBlocks()
5857 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5859 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5860 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks into *pStats, under the vector's
// mutex. Each block's metadata contributes via AddPoolStats.
5868 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5876 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5878 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5880 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5882 VMA_HEAVY_ASSERT(pBlock->Validate());
5883 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retries in the make-other-lost allocation loop (see
// VmaBlockVector::Allocate), preventing livelock under contention.
5887 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector, trying in order:
//  1. an existing block with enough free space (no allocations made lost),
//  2. a brand-new block (with downsized retries on failure for default pools),
//  3. if permitted, making other allocations lost — retried up to
//     VMA_ALLOCATION_TRY_COUNT times, choosing the cheapest candidate request.
// On total failure returns VK_ERROR_OUT_OF_DEVICE_MEMORY (or
// VK_ERROR_TOO_MANY_OBJECTS when retries are exhausted). The whole routine runs
// under the vector's mutex.
5889 VkResult VmaBlockVector::Allocate(
5890 VmaPool hCurrentPool,
5891 uint32_t currentFrameIndex,
5892 const VkMemoryRequirements& vkMemReq,
5894 VmaSuballocationType suballocType,
5895 VmaAllocation* pAllocation)
5900 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- 1) Try to place in an existing block without making anything lost. ---
5904 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5906 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5907 VMA_ASSERT(pCurrBlock);
5908 VmaAllocationRequest currRequest = {};
5909 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5912 m_BufferImageGranularity,
5920 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map refcount up front.
5924 VkResult res = pCurrBlock->Map(m_hAllocator,
nullptr);
5925 if(res != VK_SUCCESS)
5932 if(pCurrBlock->m_Metadata.IsEmpty())
5934 m_HasEmptyBlock =
false;
5937 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5938 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5939 (*pAllocation)->InitBlockAllocation(
5948 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5949 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5950 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 2) Try to create a new block, if the max block count allows it. ---
5955 const bool canCreateNewBlock =
5957 (m_Blocks.size() < m_MaxBlockCount);
5960 if(canCreateNewBlock)
5963 VkDeviceSize blockSize = m_PreferredBlockSize;
5964 size_t newBlockIndex = 0;
5965 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// Default (non-custom) pools retry with progressively smaller block sizes
// as long as the requested size still fits.
5968 if(res < 0 && m_IsCustomPool ==
false)
5972 if(blockSize >= vkMemReq.size)
5974 res = CreateBlock(blockSize, &newBlockIndex);
5979 if(blockSize >= vkMemReq.size)
5981 res = CreateBlock(blockSize, &newBlockIndex);
5986 if(res == VK_SUCCESS)
5988 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5989 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
5993 res = pBlock->Map(m_hAllocator,
nullptr);
5994 if(res != VK_SUCCESS)
// New block: take the whole-block first-allocation fast path.
6001 VmaAllocationRequest allocRequest;
6002 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6003 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6004 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6005 (*pAllocation)->InitBlockAllocation(
6008 allocRequest.offset,
6014 VMA_HEAVY_ASSERT(pBlock->Validate());
6015 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6016 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 3) Last resort: make other lost-capable allocations lost. ---
6024 if(canMakeOtherLost)
6026 uint32_t tryIndex = 0;
6027 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6029 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6030 VmaAllocationRequest bestRequest = {};
6031 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Scan every block for the cheapest make-lost request (cost 0 short-circuits).
6035 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6037 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6038 VMA_ASSERT(pCurrBlock);
6039 VmaAllocationRequest currRequest = {};
6040 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6043 m_BufferImageGranularity,
6050 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6051 if(pBestRequestBlock == VMA_NULL ||
6052 currRequestCost < bestRequestCost)
6054 pBestRequestBlock = pCurrBlock;
6055 bestRequest = currRequest;
6056 bestRequestCost = currRequestCost;
6058 if(bestRequestCost == 0)
6066 if(pBestRequestBlock != VMA_NULL)
6070 VkResult res = pBestRequestBlock->Map(m_hAllocator,
nullptr);
6071 if(res != VK_SUCCESS)
// The request may have been invalidated concurrently; on failure loop and retry.
6077 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6083 if(pBestRequestBlock->m_Metadata.IsEmpty())
6085 m_HasEmptyBlock =
false;
6088 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6089 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6090 (*pAllocation)->InitBlockAllocation(
6099 VMA_HEAVY_ASSERT(pBlock->Validate());
6100 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6101 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6115 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6117 return VK_ERROR_TOO_MANY_OBJECTS;
6121 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block. Under the mutex: unmaps persistently-mapped
// allocations, frees the suballocation, then applies the empty-block retention
// policy — at most one empty block is kept (above m_MinBlockCount); a second
// empty block is scheduled for deletion. The actual VkDeviceMemory release
// happens after the lock is dropped to avoid calling Vulkan under the mutex.
6124 void VmaBlockVector::Free(
6125 VmaAllocation hAllocation)
6127 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6131 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6133 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6135 if(hAllocation->IsPersistentMap())
6137 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
6140 pBlock->m_Metadata.Free(hAllocation);
6141 VMA_HEAVY_ASSERT(pBlock->Validate());
6143 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6146 if(pBlock->m_Metadata.IsEmpty())
// Already holding one empty block? Then this newly-empty one can go.
6149 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6151 pBlockToDelete = pBlock;
6157 m_HasEmptyBlock =
true;
// If this block is non-empty but an empty one exists at the back, drop that one.
6162 else if(m_HasEmptyBlock)
6164 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6165 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6167 pBlockToDelete = pLastBlock;
6168 m_Blocks.pop_back();
6169 m_HasEmptyBlock =
false;
6173 IncrementallySortBlocks();
// Destruction is deliberately performed outside the mutex.
6178 if(pBlockToDelete != VMA_NULL)
6180 VMA_DEBUG_LOG(
" Deleted empty allocation");
6181 pBlockToDelete->Destroy(m_hAllocator);
6182 vma_delete(m_hAllocator, pBlockToDelete);
// Removes the given block pointer from m_Blocks (linear search + vector remove).
6186 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6188 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6190 if(m_Blocks[blockIndex] == pBlock)
6192 VmaVectorRemove(m_Blocks, blockIndex)
// One bubble-sort pass ordering blocks by ascending sum of free size, so fuller
// blocks are tried first by allocation; amortized over many calls.
6199 void VmaBlockVector::IncrementallySortBlocks()
6202 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6204 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6206 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock appended to m_Blocks, and optionally reports
// its index via pNewBlockIndex.
6212 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6214 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6215 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6216 allocInfo.allocationSize = blockSize;
6217 VkDeviceMemory mem = VK_NULL_HANDLE;
6218 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6227 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6231 allocInfo.allocationSize);
6233 m_Blocks.push_back(pBlock);
6234 if(pNewBlockIndex != VMA_NULL)
6236 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector to JSON for the stats string: memory type, block
// size limits, current block count, frame-in-use count (custom pools print a
// slightly different field set than default vectors), followed by each block's
// own detailed map. Runs under the vector's mutex.
6242 #if VMA_STATS_STRING_ENABLED 6244 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6246 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6252 json.WriteString(
"MemoryTypeIndex");
6253 json.WriteNumber(m_MemoryTypeIndex);
6255 json.WriteString(
"BlockSize");
6256 json.WriteNumber(m_PreferredBlockSize);
6258 json.WriteString(
"BlockCount");
6259 json.BeginObject(
true);
6260 if(m_MinBlockCount > 0)
6262 json.WriteString(
"Min");
6263 json.WriteNumber(m_MinBlockCount);
6265 if(m_MaxBlockCount < SIZE_MAX)
6267 json.WriteString(
"Max");
6268 json.WriteNumber(m_MaxBlockCount);
6270 json.WriteString(
"Cur");
6271 json.WriteNumber(m_Blocks.size());
6274 if(m_FrameInUseCount > 0)
6276 json.WriteString(
"FrameInUseCount");
6277 json.WriteNumber(m_FrameInUseCount);
6282 json.WriteString(
"PreferredBlockSize");
6283 json.WriteNumber(m_PreferredBlockSize);
6286 json.WriteString(
"Blocks");
6288 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6290 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector (if not already
// created) and returns it.
6297 #endif // #if VMA_STATS_STRING_ENABLED 6299 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6300 VmaAllocator hAllocator,
6301 uint32_t currentFrameIndex)
6303 if(m_pDefragmentator == VMA_NULL)
6305 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6311 return m_pDefragmentator;
// Runs defragmentation under the vector's mutex: delegates to the
// defragmentator's Defragment with byte/allocation budgets, accumulates moved
// byte/allocation counts into pDefragmentationStats, then frees every emptied
// block above m_MinBlockCount (counting freed bytes/blocks into the stats) while
// keeping the m_HasEmptyBlock flag consistent with any retained empty block.
6314 VkResult VmaBlockVector::Defragment(
6316 VkDeviceSize& maxBytesToMove,
6317 uint32_t& maxAllocationsToMove)
6319 if(m_pDefragmentator == VMA_NULL)
6324 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6327 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6330 if(pDefragmentationStats != VMA_NULL)
6332 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6333 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must not exceed the budgets it was given.
6336 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6337 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reverse iteration so VmaVectorRemove does not disturb unvisited indices.
6343 m_HasEmptyBlock =
false;
6344 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6346 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6347 if(pBlock->m_Metadata.IsEmpty())
6349 if(m_Blocks.size() > m_MinBlockCount)
6351 if(pDefragmentationStats != VMA_NULL)
6354 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6357 VmaVectorRemove(m_Blocks, blockIndex);
6358 pBlock->Destroy(m_hAllocator);
6359 vma_delete(m_hAllocator, pBlock);
6363 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator, if any, and resets the pointer.
6371 void VmaBlockVector::DestroyDefragmentator()
6373 if(m_pDefragmentator != VMA_NULL)
6375 vma_delete(m_hAllocator, m_pDefragmentator);
6376 m_pDefragmentator = VMA_NULL;
// Makes lost all eligible allocations in every block of this vector (used for
// vmaMakePoolAllocationsLost), under the vector's mutex.
6380 void VmaBlockVector::MakePoolAllocationsLost(
6381 uint32_t currentFrameIndex,
6382 size_t* pLostAllocationCount)
6384 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6386 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6388 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6390 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Accumulates per-block statistics into the global VmaStats: each block's stat
// info is added to the total, to its memory type's entry, and to its heap's
// entry. Runs under the vector's mutex.
6394 void VmaBlockVector::AddStats(
VmaStats* pStats)
6396 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6397 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6399 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6401 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6403 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6405 VMA_HEAVY_ASSERT(pBlock->Validate());
6407 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6408 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6409 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6410 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and the frame index
// at which defragmentation starts. Both internal vectors use the allocator's
// custom allocation callbacks via VmaStlAllocator.
// NOTE(review): an initializer (likely m_BytesMoved(0)) appears to have been
// dropped by extraction between lines 6423 and 6425; confirm upstream.
6417 VmaDefragmentator::VmaDefragmentator(
6418 VmaAllocator hAllocator,
6419 VmaBlockVector* pBlockVector,
6420 uint32_t currentFrameIndex) :
6421 m_hAllocator(hAllocator),
6422 m_pBlockVector(pBlockVector),
6423 m_CurrentFrameIndex(currentFrameIndex),
6425 m_AllocationsMoved(0),
6426 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6427 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees every BlockInfo created in Defragment().
// Iterates backwards (size(); i--;) — a common idiom to destroy in reverse order.
6431 VmaDefragmentator::~VmaDefragmentator()
6433 for(
size_t i = m_Blocks.size(); i--; )
6435 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate to be moved. pChanged (may be null)
// is where DefragmentRound() later reports VK_TRUE if the allocation was moved.
6439 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6441 AllocationInfo allocInfo;
6442 allocInfo.m_hAllocation = hAlloc;
6443 allocInfo.m_pChanged = pChanged;
6444 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, mapping it only if necessary:
// 1) reuse a mapping made earlier by this defragmentator,
// 2) reuse a mapping already held by the block itself (user persistent mapping),
// 3) otherwise map now and remember it so Unmap() below can undo it.
6447 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Case 1: we already mapped this block during defragmentation.
6450 if(m_pMappedDataForDefragmentation)
6452 *ppMappedData = m_pMappedDataForDefragmentation;
// Case 2: block is already mapped externally; do not take another mapping.
6457 if(m_pBlock->m_Mapping.GetMappedData())
6459 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
// Case 3: map it ourselves; stored so ownership of the unmap stays with us.
6464 VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6465 *ppMappedData = m_pMappedDataForDefragmentation;
// Unmaps the block only if EnsureMapping() mapped it on our behalf (case 3 above);
// externally-held mappings are left untouched.
6469 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6471 if(m_pMappedDataForDefragmentation != VMA_NULL)
6473 m_pBlock->Unmap(hAllocator);
// One pass of defragmentation: walks source allocations from the last block
// backwards and tries to re-place each into an earlier block (dstBlockIndex <=
// srcBlockIndex), copying bytes through mapped pointers and updating metadata.
// Returns VK_INCOMPLETE when either budget (bytes or allocation count) would be
// exceeded. NOTE(review): extraction dropped braces, returns and some loop
// scaffolding throughout this function; tokens kept verbatim — consult upstream
// vk_mem_alloc.h for the full control flow.
6477 VkResult VmaDefragmentator::DefragmentRound(
6478 VkDeviceSize maxBytesToMove,
6479 uint32_t maxAllocationsToMove)
// Nothing to do without blocks.
6481 if(m_Blocks.empty())
// Start scanning from the last (most movable-from) block.
6486 size_t srcBlockIndex = m_Blocks.size() - 1;
// SIZE_MAX acts as a sentinel meaning "pick the last allocation of the block".
6487 size_t srcAllocIndex = SIZE_MAX;
// Skip over empty blocks, moving towards block 0.
6493 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6495 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the first block with nothing left to move: round is finished.
6498 if(srcBlockIndex == 0)
6505 srcAllocIndex = SIZE_MAX;
6510 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6514 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6515 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Properties of the allocation being relocated.
6517 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6518 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6519 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6520 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front; <= srcBlockIndex allows compaction
// within the same block towards lower offsets.
6523 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6525 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6526 VmaAllocationRequest dstAllocRequest;
// Ask the destination block's metadata for a spot that fits size/alignment.
// NOTE(review): several argument lines (e.g. size/alignment and the
// MoveMakesSense(...) call opener) were dropped by extraction here.
6527 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6528 m_CurrentFrameIndex,
6529 m_pBlockVector->GetFrameInUseCount(),
6530 m_pBlockVector->GetBufferImageGranularity(),
6535 &dstAllocRequest) &&
6537 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation never sacrifices (makes lost) other allocations.
6539 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop when either budget would be exceeded by this move.
6542 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6543 (m_BytesMoved + size > maxBytesToMove))
6545 return VK_INCOMPLETE;
// Map both blocks (no-ops when already mapped) to copy through the CPU.
6548 void* pDstMappedData = VMA_NULL;
6549 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6550 if(res != VK_SUCCESS)
6555 void* pSrcMappedData = VMA_NULL;
6556 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6557 if(res != VK_SUCCESS)
// Byte copy of the allocation payload (memcpy call opener dropped by extraction).
6564 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6565 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6566 static_cast<size_t>(size));
// Commit: reserve in destination metadata, release from source metadata,
// and repoint the allocation handle at its new block/offset.
6568 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6569 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6571 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report the move to the caller-supplied per-allocation flag, if provided.
6573 if(allocInfo.m_pChanged != VMA_NULL)
6575 *allocInfo.m_pChanged = VK_TRUE;
// Update budgets consumed so far.
6578 ++m_AllocationsMoved;
6579 m_BytesMoved += size;
// The allocation no longer belongs to the source block's candidate list.
6581 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next source candidate (previous allocation, or previous block).
6589 if(srcAllocIndex > 0)
6595 if(srcBlockIndex > 0)
6598 srcAllocIndex = SIZE_MAX;
// Top-level driver: builds per-block bookkeeping, distributes the registered
// allocations into their owning blocks, sorts blocks/allocations into a good
// move order, then executes up to 2 rounds of DefragmentRound() and finally
// unmaps any mappings taken during the process.
6608 VkResult VmaDefragmentator::Defragment(
6609 VkDeviceSize maxBytesToMove,
6610 uint32_t maxAllocationsToMove)
// Nothing registered via AddAllocation() => nothing to do.
6612 if(m_Allocations.empty())
// Create one BlockInfo per device memory block of the bound block vector.
6618 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6619 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6621 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6622 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6623 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be routed by binary search below.
6627 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered allocation into its owning block's candidate list,
// skipping allocations already lost.
6630 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6632 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6634 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6636 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6637 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6638 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6640 (*it)->m_Allocations.push_back(allocInfo);
6648 m_Allocations.clear();
// Pre-pass per block: note non-movable allocations and order candidates
// largest-first (typo "Descecnding" is in the upstream API name).
6650 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6652 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6653 pBlockInfo->CalcHasNonMovableAllocations();
6654 pBlockInfo->SortAllocationsBySizeDescecnding();
// Reorder blocks so preferred move destinations come first.
6658 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most 2 rounds; stop early on VK_INCOMPLETE or error.
6661 VkResult result = VK_SUCCESS;
6662 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6664 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any block mappings EnsureMapping() created for the copies.
6668 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6670 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic: a move is worthwhile only if it relocates an allocation towards
// the front of memory — to an earlier block, or to a lower offset within the
// same block. NOTE(review): the return statements between these conditions
// were dropped by extraction (earlier block => true, later block => false,
// lower offset => true); confirm against upstream.
6676 bool VmaDefragmentator::MoveMakesSense(
6677 size_t dstBlockIndex, VkDeviceSize dstOffset,
6678 size_t srcBlockIndex, VkDeviceSize srcOffset)
6680 if(dstBlockIndex < srcBlockIndex)
6684 if(dstBlockIndex > srcBlockIndex)
6688 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (initializer list + body). NOTE(review): the
// signature line and several statements (VMA_ASSERT checks, mutex init,
// ImportVulkanFunctions call, block-size setup, VmaBlockVector arguments)
// were dropped by extraction; tokens kept verbatim — confirm upstream.
// Copies user-provided allocation callbacks, or falls back to empty ones.
6701 m_hDevice(pCreateInfo->device),
6702 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6703 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6704 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6705 m_PreferredLargeHeapBlockSize(0),
6706 m_PreferredSmallHeapBlockSize(0),
6707 m_PhysicalDevice(pCreateInfo->physicalDevice),
6708 m_CurrentFrameIndex(0),
6709 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all per-type/per-heap tables before filling them in.
6713 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6714 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6715 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6717 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6718 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no artificial heap size limits.
6720 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6722 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device/memory properties through the (possibly user-supplied) function table.
6733 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6734 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply optional per-heap size limits; also clamp the reported heap sizes so
// the rest of the allocator sees the limited size.
6743 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6745 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6746 if(limit != VK_WHOLE_SIZE)
6748 m_HeapSizeLimit[heapIndex] = limit;
6749 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6751 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one default block vector and one dedicated-allocation list per memory type.
6757 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6759 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6761 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6767 GetBufferImageGranularity(),
6772 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all user pools must already be destroyed; then tear down the
// per-memory-type dedicated-allocation lists and block vectors in reverse order.
6776 VmaAllocator_T::~VmaAllocator_T()
6778 VMA_ASSERT(m_Pools.empty());
6780 for(
size_t i = GetMemoryTypeCount(); i--; )
6782 vma_delete(
this, m_pDedicatedAllocations[i]);
6783 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: first from statically-linked Vulkan entry points
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1, with KHR extension entry points
// fetched via vkGetDeviceProcAddr), then overrides individual entries from the
// optional user-supplied pVulkanFunctions, and finally asserts that every
// required pointer is set.
6787 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Static-linking path: take addresses of the global Vulkan functions directly.
6789 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6790 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6791 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6792 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6793 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6794 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6795 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6796 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6797 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6798 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6799 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6800 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6801 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6802 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6803 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// VK_KHR_dedicated_allocation entry points are extensions: they have no global
// symbols and must be loaded per-device.
6804 if(m_UseKhrDedicatedAllocation)
6806 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
6807 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
6808 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
6809 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User overrides: copy only the pointers the caller actually provided.
6811 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6813 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6814 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6816 if(pVulkanFunctions != VMA_NULL)
6818 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6819 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6820 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6821 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6822 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6823 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6824 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6825 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6826 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6827 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6828 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6829 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6830 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6831 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6832 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6833 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function pointer must be non-null; the
// KHR pair is only required when dedicated allocation is enabled.
6836 #undef VMA_COPY_IF_NOT_NULL 6840 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6841 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6842 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6843 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6844 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6845 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6846 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6847 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6848 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6849 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6850 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6851 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6852 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6853 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
6854 if(m_UseKhrDedicatedAllocation)
6856 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6857 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the default device-memory block size for a memory type: the "small"
// preferred size when the owning heap is small (<= VMA_SMALL_HEAP_MAX_SIZE) or
// the type is HOST_CACHED; otherwise the "large" preferred size.
6861 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6863 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6864 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
// HOST_CACHED types also use the small size — presumably because such heaps
// tend to be limited; NOTE(review): rationale not visible here, confirm upstream.
6865 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE ||
6867 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0;
6868 return isSmallHeap ? m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates from one specific memory type: prefers a dedicated VkDeviceMemory
// allocation when requested/large, otherwise sub-allocates from the type's
// default block vector, falling back to dedicated memory if that fails.
// NOTE(review): extraction dropped the createInfo parameter, several argument
// lists and return statements; tokens kept verbatim — confirm upstream.
6871 VkResult VmaAllocator_T::AllocateMemoryOfType(
6872 const VkMemoryRequirements& vkMemReq,
6873 bool dedicatedAllocation,
6874 VkBuffer dedicatedBuffer,
6875 VkImage dedicatedImage,
6877 uint32_t memTypeIndex,
6878 VmaSuballocationType suballocType,
6879 VmaAllocation* pAllocation)
6881 VMA_ASSERT(pAllocation != VMA_NULL);
6882 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Non-HOST_VISIBLE types cannot honor mapped-memory requests (condition's
// first half was dropped by extraction).
6888 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6893 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
6894 VMA_ASSERT(blockVector);
// Heuristic: go dedicated when forced by debug flag, explicitly requested,
// or the request exceeds half a preferred block (would fragment the block).
6896 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6897 bool preferDedicatedMemory =
6898 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6899 dedicatedAllocation ||
6901 vkMemReq.size > preferredBlockSize / 2;
// Dedicated path is only valid outside custom pools.
6903 if(preferDedicatedMemory &&
6905 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new VkDeviceMemory.
6914 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6918 return AllocateDedicatedMemory(
// Primary path: sub-allocate from the default block vector.
6932 VkResult res = blockVector->Allocate(
6934 m_CurrentFrameIndex.load(),
6939 if(res == VK_SUCCESS)
// Block allocation failed and new blocks are forbidden: give up.
6947 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation for this request.
6951 res = AllocateDedicatedMemory(
6957 finalCreateInfo.pUserData,
6961 if(res == VK_SUCCESS)
6964 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6970 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates a whole VkDeviceMemory object for a single resource, optionally
// chaining VkMemoryDedicatedAllocateInfoKHR for VK_KHR_dedicated_allocation,
// optionally persistently mapping it, and registers the result in the sorted
// per-type dedicated-allocation list. NOTE(review): extraction dropped some
// parameters (size, pUserData, map flag) and error-return lines; confirm upstream.
6977 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6979 VmaSuballocationType suballocType,
6980 uint32_t memTypeIndex,
6982 bool isUserDataString,
6984 VkBuffer dedicatedBuffer,
6985 VkImage dedicatedImage,
6986 VmaAllocation* pAllocation)
6988 VMA_ASSERT(pAllocation);
6990 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6991 allocInfo.memoryTypeIndex = memTypeIndex;
6992 allocInfo.allocationSize = size;
// Chain dedicated-allocation info when the extension is in use; buffer and
// image are mutually exclusive.
6994 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6995 if(m_UseKhrDedicatedAllocation)
6997 if(dedicatedBuffer != VK_NULL_HANDLE)
6999 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7000 dedicatedAllocInfo.buffer = dedicatedBuffer;
7001 allocInfo.pNext = &dedicatedAllocInfo;
7003 else if(dedicatedImage != VK_NULL_HANDLE)
7005 dedicatedAllocInfo.image = dedicatedImage;
7006 allocInfo.pNext = &dedicatedAllocInfo;
// Goes through AllocateVulkanMemory to honor heap size limits and callbacks.
7011 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7012 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7015 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping (the guarding condition was dropped by extraction);
// on map failure the fresh memory is released before returning.
7019 void* pMappedData =
nullptr;
7022 res = (*m_VulkanFunctions.vkMapMemory)(
7031 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7032 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T handle.
7037 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7038 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7039 (*pAllocation)->SetUserData(
this, pUserData);
// Registration is guarded by the per-memory-type dedicated-allocations mutex;
// the list is kept sorted so FreeDedicatedMemory can binary-remove.
7043 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7044 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7045 VMA_ASSERT(pDedicatedAllocations);
7046 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7049 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether a
// dedicated allocation is required/preferred; otherwise it falls back to the
// core entry point and reports false for both flags.
7054 void VmaAllocator_T::GetBufferMemoryRequirements(
7056 VkMemoryRequirements& memReq,
7057 bool& requiresDedicatedAllocation,
7058 bool& prefersDedicatedAllocation)
const 7060 if(m_UseKhrDedicatedAllocation)
7062 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7063 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated-alloc flags.
7065 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7067 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7068 memReq2.pNext = &memDedicatedReq;
7070 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7072 memReq = memReq2.memoryRequirements;
// Normalize VkBool32 to C++ bool.
7073 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7074 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core API without dedicated-allocation information.
7078 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7079 requiresDedicatedAllocation =
false;
7080 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when the
// dedicated-allocation extension is enabled, else the core query with both
// dedicated flags reported as false.
7084 void VmaAllocator_T::GetImageMemoryRequirements(
7086 VkMemoryRequirements& memReq,
7087 bool& requiresDedicatedAllocation,
7088 bool& prefersDedicatedAllocation)
const 7090 if(m_UseKhrDedicatedAllocation)
7092 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7093 memReqInfo.image = hImage;
7095 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7097 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7098 memReq2.pNext = &memDedicatedReq;
7100 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7102 memReq = memReq2.memoryRequirements;
7103 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7104 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path without extension information.
7108 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7109 requiresDedicatedAllocation =
false;
7110 prefersDedicatedAllocation =
false;
// Central allocation entry: validates the create-info flag combinations,
// routes pool allocations to the pool's block vector, and otherwise iterates
// memory types matching vkMemReq.memoryTypeBits (best first, then the rest)
// calling AllocateMemoryOfType. NOTE(review): extraction dropped the
// createInfo parameter, flag-test conditions, FindMemoryTypeIndex calls and
// several argument lists; tokens kept verbatim — confirm upstream.
7114 VkResult VmaAllocator_T::AllocateMemory(
7115 const VkMemoryRequirements& vkMemReq,
7116 bool requiresDedicatedAllocation,
7117 bool prefersDedicatedAllocation,
7118 VkBuffer dedicatedBuffer,
7119 VkImage dedicatedImage,
7121 VmaSuballocationType suballocType,
7122 VmaAllocation* pAllocation)
// Invalid flag combination: DEDICATED_MEMORY together with NEVER_ALLOCATE.
7127 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7128 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Invalid: MAPPED together with CAN_BECOME_LOST (a lost allocation has no memory to map).
7133 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7134 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Dedicated allocation required by the driver conflicts with NEVER_ALLOCATE
// and with custom pools.
7136 if(requiresDedicatedAllocation)
7140 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7141 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7143 if(createInfo.
pool != VK_NULL_HANDLE)
7145 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7146 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Also invalid: asking for DEDICATED_MEMORY inside a custom pool.
7149 if((createInfo.
pool != VK_NULL_HANDLE) &&
7152 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7153 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: delegate straight to the pool's block vector.
7156 if(createInfo.
pool != VK_NULL_HANDLE)
7158 return createInfo.
pool->m_BlockVector.Allocate(
7160 m_CurrentFrameIndex.load(),
// General path: pick the best matching memory type, try it, then mask it out
// and retry remaining candidate types until success or exhaustion.
7169 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7170 uint32_t memTypeIndex = UINT32_MAX;
7172 if(res == VK_SUCCESS)
7174 res = AllocateMemoryOfType(
7176 requiresDedicatedAllocation || prefersDedicatedAllocation,
7184 if(res == VK_SUCCESS)
// Remove the failed type from the candidate mask before retrying.
7194 memoryTypeBits &= ~(1u << memTypeIndex);
7197 if(res == VK_SUCCESS)
7199 res = AllocateMemoryOfType(
7201 requiresDedicatedAllocation || prefersDedicatedAllocation,
7209 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
7219 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: block allocations are returned to their owning block
// vector (custom pool's or the default per-type one), dedicated allocations go
// through FreeDedicatedMemory. Already-lost allocations have no backing memory
// to release. Finally the VmaAllocation_T handle itself is destroyed.
7230 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7232 VMA_ASSERT(allocation);
// Skip the memory release entirely if the allocation was made lost — its
// backing region was already reclaimed.
7234 if(allocation->CanBecomeLost() ==
false ||
7235 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7237 switch(allocation->GetType())
7239 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7241 VmaBlockVector* pBlockVector = VMA_NULL;
7242 VmaPool hPool = allocation->GetPool();
// Pool allocations free into the pool's block vector; default allocations
// into the per-memory-type vector.
7243 if(hPool != VK_NULL_HANDLE)
7245 pBlockVector = &hPool->m_BlockVector;
7249 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7250 pBlockVector = m_pBlockVectors[memTypeIndex];
7252 pBlockVector->Free(allocation);
7255 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7256 FreeDedicatedMemory(allocation);
// Release user data (may free an owned string copy) before deleting the handle.
7263 allocation->SetUserData(
this, VMA_NULL);
7264 vma_delete(
this, allocation);
// Computes global statistics: zero-initializes all buckets, accumulates the
// default block vectors, every custom pool, and every dedicated allocation,
// then post-processes totals (e.g. derived averages) for each bucket.
7267 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset total plus every per-type and per-heap bucket.
7270 InitStatInfo(pStats->
total);
7271 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7273 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7277 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7279 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7280 VMA_ASSERT(pBlockVector);
7281 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
7286 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7287 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7289 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, under the per-type mutex.
7294 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7296 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7297 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7298 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7299 VMA_ASSERT(pDedicatedAllocVector);
7300 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
// NOTE(review): the local allocationStatInfo declaration was dropped by extraction.
7303 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7304 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7305 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7306 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive summary values for each bucket that received data.
7311 VmaPostprocessCalcStatInfo(pStats->
total);
7312 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7313 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7314 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7315 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// AMD's PCI vendor ID (4098 == 0x1002), used for vendor-specific handling.
7318 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation, matching the public vmaDefragment API:
// clears output arrays, registers each eligible allocation with a
// defragmentator on its owning block vector, runs defragmentation on all
// HOST_VISIBLE default vectors and all pools, then destroys the
// defragmentators. NOTE(review): extraction dropped some parameters
// (pDefragmentationInfo/pDefragmentationStats), eligibility conditions and
// limit assignments; tokens kept verbatim — confirm upstream.
7320 VkResult VmaAllocator_T::Defragment(
7321 VmaAllocation* pAllocations,
7322 size_t allocationCount,
7323 VkBool32* pAllocationsChanged,
// Zero outputs up front so partial failures still leave defined values.
// NOTE(review): upstream memsets the whole array; the visible size expression
// covers one element — the count factor appears dropped by extraction.
7327 if(pAllocationsChanged != VMA_NULL)
7329 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7331 if(pDefragmentationStats != VMA_NULL)
7333 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7336 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Pools mutex held for the whole operation.
7338 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7340 const size_t poolCount = m_Pools.size();
// Dispatch each candidate allocation to a defragmentator on its block vector.
7343 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7345 VmaAllocation hAlloc = pAllocations[allocIndex];
7347 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block (non-dedicated) allocations in HOST_VISIBLE memory that are not
// lost can be moved with a CPU memcpy.
7349 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7351 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7353 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7355 VmaBlockVector* pAllocBlockVector =
nullptr;
7357 const VmaPool hAllocPool = hAlloc->GetPool();
// Pool allocation vs default per-type vector.
7359 if(hAllocPool != VK_NULL_HANDLE)
7361 pAllocBlockVector = &hAllocPool->GetBlockVector();
7366 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
// Lazily creates the defragmentator for that vector.
7369 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7371 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7372 &pAllocationsChanged[allocIndex] : VMA_NULL;
7373 pDefragmentator->AddAllocation(hAlloc, pChanged);
7377 VkResult result = VK_SUCCESS;
// Budgets default to "unlimited"; overridden from pDefragmentationInfo when given
// (override assignments dropped by extraction).
7381 VkDeviceSize maxBytesToMove = SIZE_MAX;
7382 uint32_t maxAllocationsToMove = UINT32_MAX;
7383 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation on every HOST_VISIBLE default block vector...
7390 for(uint32_t memTypeIndex = 0;
7391 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7395 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7397 result = m_pBlockVectors[memTypeIndex]->Defragment(
7398 pDefragmentationStats,
7400 maxAllocationsToMove);
// ...and on every custom pool.
7405 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7407 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7408 pDefragmentationStats,
7410 maxAllocationsToMove);
// Cleanup: destroy the defragmentators created above, pools first then
// default vectors, in reverse order.
7416 for(
size_t poolIndex = poolCount; poolIndex--; )
7418 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7422 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7424 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7426 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo from the allocation. For CAN_BECOME_LOST allocations
// it runs a compare-exchange "touch" loop: if the allocation is lost, report
// zeroed memory fields; if already touched this frame, report normally;
// otherwise try to bump its last-use frame index and retry. Non-lost-capable
// allocations are reported directly at the end.
7433 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7435 if(hAllocation->CanBecomeLost())
// Loop state: frame now vs. allocation's last recorded use
// (loop construct itself was dropped by extraction).
7441 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7442 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: no backing memory; only size and user data remain meaningful.
7445 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7449 pAllocationInfo->
offset = 0;
7450 pAllocationInfo->
size = hAllocation->GetSize();
7452 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real location.
7455 else if(localLastUseFrameIndex == localCurrFrameIndex)
7457 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7458 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7459 pAllocationInfo->
offset = hAllocation->GetOffset();
7460 pAllocationInfo->
size = hAllocation->GetSize();
7462 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Try to claim this frame as the last use; on failure another thread raced
// us and the loop re-reads the index.
7467 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7469 localLastUseFrameIndex = localCurrFrameIndex;
// Plain (non-lost-capable) allocation: report all fields directly.
7476 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7477 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7478 pAllocationInfo->
offset = hAllocation->GetOffset();
7479 pAllocationInfo->
size = hAllocation->GetSize();
7480 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7481 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: constructs the VmaPool_T object, pre-creates its
// minimum number of blocks (destroying the pool on failure), then registers
// the pool in the sorted m_Pools list under the pools mutex.
// NOTE(review): newCreateInfo is built from pCreateInfo on lines dropped by
// extraction (defaulting block size/counts); confirm upstream.
7485 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7487 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7500 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Eagerly allocate minBlockCount blocks so failure is reported at creation time.
7502 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7503 if(res != VK_SUCCESS)
7505 vma_delete(
this, *pPool);
// Register the new pool; sorted insert pairs with the sorted remove in DestroyPool.
7512 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7513 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters a pool from m_Pools (must be present) and destroys it.
7519 void VmaAllocator_T::DestroyPool(VmaPool pool)
// Removal from the sorted list is guarded by the pools mutex.
7523 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7524 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7525 VMA_ASSERT(success &&
"Pool not found in Allocator.");
// Destruction happens after the lock scope in the original layout.
7528 vma_delete(
this, pool);
// Thin forwarder: pool statistics are computed by the pool's block vector.
7531 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7533 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, which drives
// the lost-allocation logic elsewhere in the allocator.
7536 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7538 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector with the current frame index.
// NOTE(review): the hPool parameter line was dropped by extraction.
7541 void VmaAllocator_T::MakePoolAllocationsLost(
7543 size_t* pLostAllocationCount)
7545 hPool->m_BlockVector.MakePoolAllocationsLost(
7546 m_CurrentFrameIndex.load(),
7547 pLostAllocationCount);
// Creates a dummy allocation that is born lost (frame index VMA_FRAME_INDEX_LOST,
// no user-data string), matching the public vmaCreateLostAllocation API.
7550 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7552 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7553 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory: enforces the optional per-heap size limit (budget is
// decremented under m_HeapSizeLimitMutex on success, request rejected with
// VK_ERROR_OUT_OF_DEVICE_MEMORY when it would exceed the budget) and invokes
// the user's pfnAllocate device-memory callback on success.
7556 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7558 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budgeted path: this heap has a user-imposed size limit.
7561 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7563 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7564 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7566 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7567 if(res == VK_SUCCESS)
// Consume budget only after the driver confirms the allocation.
7569 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7574 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unbudgeted path: allocate directly.
7579 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user callback about the new VkDeviceMemory.
7582 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7584 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Wraps vkFreeMemory: notifies the user's pfnFree callback first (while the
// memory is still valid), frees it, then returns the bytes to the heap budget
// if this heap has a size limit.
7590 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7592 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7594 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7597 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Restore budget under the same mutex AllocateVulkanMemory uses to consume it.
7599 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7600 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7602 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7603 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole block (reference counted) and
// return block pointer + allocation offset; dedicated allocations delegate to
// their own map logic.
7607 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7609 if(hAllocation->CanBecomeLost())
7611 return VK_ERROR_MEMORY_MAP_FAILED;
7614 switch(hAllocation->GetType())
7616 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7618 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7619 char *pBytes =
nullptr;
7620 VkResult res = pBlock->Map(
this, (
void**)&pBytes);
7621 if(res == VK_SUCCESS)
// Returned pointer is the block mapping advanced by this allocation's offset;
// BlockAllocMap bumps the allocation's own map reference count.
7623 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7624 hAllocation->BlockAllocMap();
7628 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7629 return hAllocation->DedicatedAllocMap(
this, ppData);
// Unknown allocation type: refuse to map.
7632 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverses Map(): drops the allocation's map reference and unmaps the block
// (block type), or delegates to the dedicated allocation's unmap.
7636 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7638 switch(hAllocation->GetType())
7640 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7642 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7643 hAllocation->BlockAllocUnmap();
7644 pBlock->Unmap(
this);
7647 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7648 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: removes it from the sorted per-type
// registry (under that type's mutex), unmaps it if it was persistently mapped,
// and frees the VkDeviceMemory through FreeVulkanMemory (budget + callbacks).
7655 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7657 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7659 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Unregister pairs with VmaVectorInsertSorted in AllocateDedicatedMemory.
7661 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7662 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7663 VMA_ASSERT(pDedicatedAllocations);
7664 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7665 VMA_ASSERT(success);
7668 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistent mapping must be undone before freeing the memory object.
7670 if(allocation->GetMappedData() != VMA_NULL)
7672 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7675 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7677 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Emits the detailed JSON map (only compiled when VMA_STATS_STRING_ENABLED):
// a "DedicatedAllocations" section grouped by memory type, a "DefaultPools"
// section for non-empty default block vectors, and a "Pools" array for custom
// pools. Section headers are written lazily on the first non-empty group.
// NOTE(review): extraction dropped BeginObject/EndObject/EndString calls and
// some writes; tokens kept verbatim — confirm upstream.
7680 #if VMA_STATS_STRING_ENABLED 7682 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7684 bool dedicatedAllocationsStarted =
false;
7685 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7687 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7688 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7689 VMA_ASSERT(pDedicatedAllocVector);
7690 if(pDedicatedAllocVector->empty() ==
false)
// Open the section only once, on the first memory type with entries.
7692 if(dedicatedAllocationsStarted ==
false)
7694 dedicatedAllocationsStarted =
true;
7695 json.WriteString(
"DedicatedAllocations");
// Key per memory type: "Type <index>".
7699 json.BeginString(
"Type ");
7700 json.ContinueString(memTypeIndex);
7705 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7707 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7708 json.BeginObject(
true);
7710 json.WriteString(
"Type");
7711 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7713 json.WriteString(
"Size");
7714 json.WriteNumber(hAlloc->GetSize());
// User data: emitted as a string when stored as a string, otherwise as the
// raw pointer value.
7716 const void* pUserData = hAlloc->GetUserData();
7717 if(pUserData != VMA_NULL)
7719 json.WriteString(
"UserData");
7720 if(hAlloc->IsUserDataString())
7722 json.WriteString((
const char*)pUserData);
7727 json.ContinueString_Pointer(pUserData);
7738 if(dedicatedAllocationsStarted)
// "DefaultPools" section: one entry per non-empty default block vector.
7744 bool allocationsStarted =
false;
7745 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7747 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7749 if(allocationsStarted ==
false)
7751 allocationsStarted =
true;
7752 json.WriteString(
"DefaultPools");
7756 json.BeginString(
"Type ");
7757 json.ContinueString(memTypeIndex);
7760 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7763 if(allocationsStarted)
// "Pools" array: custom pools, under the pools mutex.
7770 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7771 const size_t poolCount = m_Pools.size();
7774 json.WriteString(
"Pools");
7776 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7778 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7785 #endif // #if VMA_STATS_STRING_ENABLED 7787 static VkResult AllocateMemoryForImage(
7788 VmaAllocator allocator,
7791 VmaSuballocationType suballocType,
7792 VmaAllocation* pAllocation)
7794 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7796 VkMemoryRequirements vkMemReq = {};
7797 bool requiresDedicatedAllocation =
false;
7798 bool prefersDedicatedAllocation =
false;
7799 allocator->GetImageMemoryRequirements(image, vkMemReq,
7800 requiresDedicatedAllocation, prefersDedicatedAllocation);
7802 return allocator->AllocateMemory(
7804 requiresDedicatedAllocation,
7805 prefersDedicatedAllocation,
7808 *pAllocationCreateInfo,
7818 VmaAllocator* pAllocator)
7820 VMA_ASSERT(pCreateInfo && pAllocator);
7821 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7827 VmaAllocator allocator)
7829 if(allocator != VK_NULL_HANDLE)
7831 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7832 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7833 vma_delete(&allocationCallbacks, allocator);
7838 VmaAllocator allocator,
7839 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7841 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7842 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7846 VmaAllocator allocator,
7847 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7849 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7850 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7854 VmaAllocator allocator,
7855 uint32_t memoryTypeIndex,
7856 VkMemoryPropertyFlags* pFlags)
7858 VMA_ASSERT(allocator && pFlags);
7859 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7860 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7864 VmaAllocator allocator,
7865 uint32_t frameIndex)
7867 VMA_ASSERT(allocator);
7868 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7870 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7872 allocator->SetCurrentFrameIndex(frameIndex);
7876 VmaAllocator allocator,
7879 VMA_ASSERT(allocator && pStats);
7880 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7881 allocator->CalculateStats(pStats);
7884 #if VMA_STATS_STRING_ENABLED 7887 VmaAllocator allocator,
7888 char** ppStatsString,
7889 VkBool32 detailedMap)
7891 VMA_ASSERT(allocator && ppStatsString);
7892 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7894 VmaStringBuilder sb(allocator);
7896 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7900 allocator->CalculateStats(&stats);
7902 json.WriteString(
"Total");
7903 VmaPrintStatInfo(json, stats.
total);
7905 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7907 json.BeginString(
"Heap ");
7908 json.ContinueString(heapIndex);
7912 json.WriteString(
"Size");
7913 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7915 json.WriteString(
"Flags");
7916 json.BeginArray(
true);
7917 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7919 json.WriteString(
"DEVICE_LOCAL");
7925 json.WriteString(
"Stats");
7926 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7929 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7931 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7933 json.BeginString(
"Type ");
7934 json.ContinueString(typeIndex);
7939 json.WriteString(
"Flags");
7940 json.BeginArray(
true);
7941 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7942 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7944 json.WriteString(
"DEVICE_LOCAL");
7946 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7948 json.WriteString(
"HOST_VISIBLE");
7950 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7952 json.WriteString(
"HOST_COHERENT");
7954 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7956 json.WriteString(
"HOST_CACHED");
7958 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7960 json.WriteString(
"LAZILY_ALLOCATED");
7966 json.WriteString(
"Stats");
7967 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7976 if(detailedMap == VK_TRUE)
7978 allocator->PrintDetailedMap(json);
7984 const size_t len = sb.GetLength();
7985 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7988 memcpy(pChars, sb.GetData(), len);
7991 *ppStatsString = pChars;
7995 VmaAllocator allocator,
7998 if(pStatsString != VMA_NULL)
8000 VMA_ASSERT(allocator);
8001 size_t len = strlen(pStatsString);
8002 vma_delete_array(allocator, pStatsString, len + 1);
8006 #endif // #if VMA_STATS_STRING_ENABLED 8012 VmaAllocator allocator,
8013 uint32_t memoryTypeBits,
8015 uint32_t* pMemoryTypeIndex)
8017 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8018 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8019 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8026 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8030 switch(pAllocationCreateInfo->
usage)
8035 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8038 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8041 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8042 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8045 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8046 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8052 *pMemoryTypeIndex = UINT32_MAX;
8053 uint32_t minCost = UINT32_MAX;
8054 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8055 memTypeIndex < allocator->GetMemoryTypeCount();
8056 ++memTypeIndex, memTypeBit <<= 1)
8059 if((memTypeBit & memoryTypeBits) != 0)
8061 const VkMemoryPropertyFlags currFlags =
8062 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8064 if((requiredFlags & ~currFlags) == 0)
8067 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8069 if(currCost < minCost)
8071 *pMemoryTypeIndex = memTypeIndex;
8081 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8085 VmaAllocator allocator,
8089 VMA_ASSERT(allocator && pCreateInfo && pPool);
8091 VMA_DEBUG_LOG(
"vmaCreatePool");
8093 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8095 return allocator->CreatePool(pCreateInfo, pPool);
8099 VmaAllocator allocator,
8102 VMA_ASSERT(allocator);
8104 if(pool == VK_NULL_HANDLE)
8109 VMA_DEBUG_LOG(
"vmaDestroyPool");
8111 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8113 allocator->DestroyPool(pool);
8117 VmaAllocator allocator,
8121 VMA_ASSERT(allocator && pool && pPoolStats);
8123 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8125 allocator->GetPoolStats(pool, pPoolStats);
8129 VmaAllocator allocator,
8131 size_t* pLostAllocationCount)
8133 VMA_ASSERT(allocator && pool);
8135 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8137 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8141 VmaAllocator allocator,
8142 const VkMemoryRequirements* pVkMemoryRequirements,
8144 VmaAllocation* pAllocation,
8147 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8149 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8151 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8153 VkResult result = allocator->AllocateMemory(
8154 *pVkMemoryRequirements,
8160 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8163 if(pAllocationInfo && result == VK_SUCCESS)
8165 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8172 VmaAllocator allocator,
8175 VmaAllocation* pAllocation,
8178 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8180 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8182 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8184 VkMemoryRequirements vkMemReq = {};
8185 bool requiresDedicatedAllocation =
false;
8186 bool prefersDedicatedAllocation =
false;
8187 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8188 requiresDedicatedAllocation,
8189 prefersDedicatedAllocation);
8191 VkResult result = allocator->AllocateMemory(
8193 requiresDedicatedAllocation,
8194 prefersDedicatedAllocation,
8198 VMA_SUBALLOCATION_TYPE_BUFFER,
8201 if(pAllocationInfo && result == VK_SUCCESS)
8203 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8210 VmaAllocator allocator,
8213 VmaAllocation* pAllocation,
8216 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8218 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8220 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8222 VkResult result = AllocateMemoryForImage(
8226 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8229 if(pAllocationInfo && result == VK_SUCCESS)
8231 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8238 VmaAllocator allocator,
8239 VmaAllocation allocation)
8241 VMA_ASSERT(allocator && allocation);
8243 VMA_DEBUG_LOG(
"vmaFreeMemory");
8245 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8247 allocator->FreeMemory(allocation);
8251 VmaAllocator allocator,
8252 VmaAllocation allocation,
8255 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8257 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8259 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8263 VmaAllocator allocator,
8264 VmaAllocation allocation,
8267 VMA_ASSERT(allocator && allocation);
8269 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8271 allocation->SetUserData(allocator, pUserData);
8275 VmaAllocator allocator,
8276 VmaAllocation* pAllocation)
8278 VMA_ASSERT(allocator && pAllocation);
8280 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8282 allocator->CreateLostAllocation(pAllocation);
8286 VmaAllocator allocator,
8287 VmaAllocation allocation,
8290 VMA_ASSERT(allocator && allocation && ppData);
8292 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8294 return allocator->Map(allocation, ppData);
8298 VmaAllocator allocator,
8299 VmaAllocation allocation)
8301 VMA_ASSERT(allocator && allocation);
8303 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8305 allocator->Unmap(allocation);
8309 VmaAllocator allocator,
8310 VmaAllocation* pAllocations,
8311 size_t allocationCount,
8312 VkBool32* pAllocationsChanged,
8316 VMA_ASSERT(allocator && pAllocations);
8318 VMA_DEBUG_LOG(
"vmaDefragment");
8320 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8322 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8326 VmaAllocator allocator,
8327 const VkBufferCreateInfo* pBufferCreateInfo,
8330 VmaAllocation* pAllocation,
8333 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8335 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8337 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8339 *pBuffer = VK_NULL_HANDLE;
8340 *pAllocation = VK_NULL_HANDLE;
8343 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8344 allocator->m_hDevice,
8346 allocator->GetAllocationCallbacks(),
8351 VkMemoryRequirements vkMemReq = {};
8352 bool requiresDedicatedAllocation =
false;
8353 bool prefersDedicatedAllocation =
false;
8354 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8355 requiresDedicatedAllocation, prefersDedicatedAllocation);
8359 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8361 VMA_ASSERT(vkMemReq.alignment %
8362 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8364 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8366 VMA_ASSERT(vkMemReq.alignment %
8367 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8369 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8371 VMA_ASSERT(vkMemReq.alignment %
8372 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8376 res = allocator->AllocateMemory(
8378 requiresDedicatedAllocation,
8379 prefersDedicatedAllocation,
8382 *pAllocationCreateInfo,
8383 VMA_SUBALLOCATION_TYPE_BUFFER,
8388 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8389 allocator->m_hDevice,
8391 (*pAllocation)->GetMemory(),
8392 (*pAllocation)->GetOffset());
8396 if(pAllocationInfo != VMA_NULL)
8398 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8402 allocator->FreeMemory(*pAllocation);
8403 *pAllocation = VK_NULL_HANDLE;
8404 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8405 *pBuffer = VK_NULL_HANDLE;
8408 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8409 *pBuffer = VK_NULL_HANDLE;
8416 VmaAllocator allocator,
8418 VmaAllocation allocation)
8420 if(buffer != VK_NULL_HANDLE)
8422 VMA_ASSERT(allocator);
8424 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8426 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8428 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8430 allocator->FreeMemory(allocation);
8435 VmaAllocator allocator,
8436 const VkImageCreateInfo* pImageCreateInfo,
8439 VmaAllocation* pAllocation,
8442 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8444 VMA_DEBUG_LOG(
"vmaCreateImage");
8446 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8448 *pImage = VK_NULL_HANDLE;
8449 *pAllocation = VK_NULL_HANDLE;
8452 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8453 allocator->m_hDevice,
8455 allocator->GetAllocationCallbacks(),
8459 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8460 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8461 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8464 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8468 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8469 allocator->m_hDevice,
8471 (*pAllocation)->GetMemory(),
8472 (*pAllocation)->GetOffset());
8476 if(pAllocationInfo != VMA_NULL)
8478 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8482 allocator->FreeMemory(*pAllocation);
8483 *pAllocation = VK_NULL_HANDLE;
8484 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8485 *pImage = VK_NULL_HANDLE;
8488 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8489 *pImage = VK_NULL_HANDLE;
8496 VmaAllocator allocator,
8498 VmaAllocation allocation)
8500 if(image != VK_NULL_HANDLE)
8502 VMA_ASSERT(allocator);
8504 VMA_DEBUG_LOG(
"vmaDestroyImage");
8506 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8508 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8510 allocator->FreeMemory(allocation);
8514 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:758
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1005
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:783
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:768
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:968
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:762
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1273
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:780
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1439
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1143
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1197
Definition: vk_mem_alloc.h:1042
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:751
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1080
Definition: vk_mem_alloc.h:989
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:795
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:848
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:777
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MiB...
Definition: vk_mem_alloc.h:792
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:993
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:913
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:765
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:912
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:773
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1443
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:812
VmaStatInfo total
Definition: vk_mem_alloc.h:922
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1451
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1064
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1434
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:766
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:693
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:786
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1151
Definition: vk_mem_alloc.h:1145
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1283
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:763
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1101
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1167
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1203
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:749
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1154
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:950
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1429
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1447
Definition: vk_mem_alloc.h:983
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1088
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:764
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:918
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:699
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:720
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:725
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1449
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1075
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1213
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:759
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:901
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1162
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:712
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1049
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:914
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:716
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1157
Definition: vk_mem_alloc.h:988
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1070
Definition: vk_mem_alloc.h:1061
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:904
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:761
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1175
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:798
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1206
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1059
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1094
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:836
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:920
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1029
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:913
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:770
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:714
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:769
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1189
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1297
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:789
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:913
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:910
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1194
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1278
Definition: vk_mem_alloc.h:1057
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1445
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:757
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:772
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:908
Definition: vk_mem_alloc.h:955
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1147
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:906
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:767
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:771
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1016
Definition: vk_mem_alloc.h:977
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1292
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:747
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:760
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1259
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1125
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:914
Definition: vk_mem_alloc.h:1055
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:921
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1200
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:914
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1264