23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 688 #include <vulkan/vulkan.h> 690 VK_DEFINE_HANDLE(VmaAllocator)
694 VmaAllocator allocator,
696 VkDeviceMemory memory,
700 VmaAllocator allocator,
702 VkDeviceMemory memory,
851 VmaAllocator* pAllocator);
855 VmaAllocator allocator);
862 VmaAllocator allocator,
863 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
870 VmaAllocator allocator,
871 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
880 VmaAllocator allocator,
881 uint32_t memoryTypeIndex,
882 VkMemoryPropertyFlags* pFlags);
893 VmaAllocator allocator,
894 uint32_t frameIndex);
924 VmaAllocator allocator,
927 #define VMA_STATS_STRING_ENABLED 1 929 #if VMA_STATS_STRING_ENABLED 935 VmaAllocator allocator,
936 char** ppStatsString,
937 VkBool32 detailedMap);
940 VmaAllocator allocator,
943 #endif // #if VMA_STATS_STRING_ENABLED 945 VK_DEFINE_HANDLE(VmaPool)
1123 VmaAllocator allocator,
1124 uint32_t memoryTypeBits,
1126 uint32_t* pMemoryTypeIndex);
1227 VmaAllocator allocator,
1234 VmaAllocator allocator,
1244 VmaAllocator allocator,
1255 VmaAllocator allocator,
1257 size_t* pLostAllocationCount);
1259 VK_DEFINE_HANDLE(VmaAllocation)
1315 VmaAllocator allocator,
1316 const VkMemoryRequirements* pVkMemoryRequirements,
1318 VmaAllocation* pAllocation,
1328 VmaAllocator allocator,
1331 VmaAllocation* pAllocation,
1336 VmaAllocator allocator,
1339 VmaAllocation* pAllocation,
1344 VmaAllocator allocator,
1345 VmaAllocation allocation);
1349 VmaAllocator allocator,
1350 VmaAllocation allocation,
1367 VmaAllocator allocator,
1368 VmaAllocation allocation,
1382 VmaAllocator allocator,
1383 VmaAllocation* pAllocation);
1420 VmaAllocator allocator,
1421 VmaAllocation allocation,
1429 VmaAllocator allocator,
1430 VmaAllocation allocation);
1535 VmaAllocator allocator,
1536 VmaAllocation* pAllocations,
1537 size_t allocationCount,
1538 VkBool32* pAllocationsChanged,
1569 VmaAllocator allocator,
1570 const VkBufferCreateInfo* pBufferCreateInfo,
1573 VmaAllocation* pAllocation,
1588 VmaAllocator allocator,
1590 VmaAllocation allocation);
1594 VmaAllocator allocator,
1595 const VkImageCreateInfo* pImageCreateInfo,
1598 VmaAllocation* pAllocation,
1613 VmaAllocator allocator,
1615 VmaAllocation allocation);
1621 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1624 #ifdef __INTELLISENSE__ 1625 #define VMA_IMPLEMENTATION 1628 #ifdef VMA_IMPLEMENTATION 1629 #undef VMA_IMPLEMENTATION 1651 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1652 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1664 #if VMA_USE_STL_CONTAINERS 1665 #define VMA_USE_STL_VECTOR 1 1666 #define VMA_USE_STL_UNORDERED_MAP 1 1667 #define VMA_USE_STL_LIST 1 1670 #if VMA_USE_STL_VECTOR 1674 #if VMA_USE_STL_UNORDERED_MAP 1675 #include <unordered_map> 1678 #if VMA_USE_STL_LIST 1687 #include <algorithm> 1691 #if !defined(_WIN32) 1698 #define VMA_ASSERT(expr) assert(expr) 1700 #define VMA_ASSERT(expr) 1706 #ifndef VMA_HEAVY_ASSERT 1708 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1710 #define VMA_HEAVY_ASSERT(expr) 1716 #define VMA_NULL nullptr 1719 #ifndef VMA_ALIGN_OF 1720 #define VMA_ALIGN_OF(type) (__alignof(type)) 1723 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1725 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1727 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1731 #ifndef VMA_SYSTEM_FREE 1733 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1735 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1740 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1744 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1748 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1752 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1755 #ifndef VMA_DEBUG_LOG 1756 #define VMA_DEBUG_LOG(format, ...) 1766 #if VMA_STATS_STRING_ENABLED 1767 static inline void VmaUint32ToStr(
    char* outStr,
    size_t strLen, uint32_t num)
{
    // snprintf truncates to strLen-1 characters and always NUL-terminates
    // the output when strLen > 0.
    snprintf(outStr, strLen,
        "%u", static_cast<unsigned int>(num));
}
// Writes num as decimal text into outStr. The result is truncated to fit
// strLen; snprintf guarantees NUL-termination when strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = (unsigned long long)num;
    snprintf(outStr, strLen, "%llu", value);
}
// Writes the implementation-defined "%p" text form of ptr into outStr,
// truncated to strLen (NUL-terminated by snprintf when strLen > 0).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
    // Acquire the wrapped std::mutex; blocks until available.
    void Lock() { m_Mutex.lock(); }
    // Release the wrapped std::mutex; caller must currently hold it.
    void Unlock() { m_Mutex.unlock(); }
1792 #define VMA_MUTEX VmaMutex 1803 #ifndef VMA_ATOMIC_UINT32 1804 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1807 #ifndef VMA_BEST_FIT 1820 #define VMA_BEST_FIT (1) 1823 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1828 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1831 #ifndef VMA_DEBUG_ALIGNMENT 1836 #define VMA_DEBUG_ALIGNMENT (1) 1839 #ifndef VMA_DEBUG_MARGIN 1844 #define VMA_DEBUG_MARGIN (0) 1847 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1852 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1855 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1860 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1863 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1864 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 1868 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1869 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 1873 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null VkAllocationCallbacks used as a stand-in when the user supplies no
// callbacks; with null pfnAllocation/pfnFree, VmaMalloc/VmaFree fall back to
// the system aligned allocator.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic SWAR parallel bit-summing trick in five constant-time steps.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);        // sum adjacent bit pairs
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);  // sum nibbles
    c = ((c >> 4) + c) & 0x0F0F0F0F;                 // sum bytes
    c = ((c >> 8) + c) & 0x00FF00FF;                 // sum 16-bit halves
    c = ((c >> 16) + c) & 0x0000FFFF;                // final total
    return c; // the excerpt was missing this return — UB for any caller using the value
}
// Rounds val up to the nearest multiple of align. Uses division rather than
// bit masking, so it is correct for any positive align, not just powers of 2.
// NOTE(review): assumes val + align - 1 does not overflow T — confirm callers.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return (bumped / align) * align;
}
// Integer division of x by y rounded to nearest (adds half the divisor
// before dividing; intended for the non-negative values used in this file).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition for VmaQuickSort: the last element *(end-1) is the
// pivot. Elements for which cmp(elem, pivot) holds are swapped in front of
// insertIndex; the pivot is then swapped into its final slot, whose iterator
// is returned. The excerpt had lost the ++insertIndex advance and the final
// return — restored here.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex; // grow the "less than pivot" prefix
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue); // place pivot at its final position
    }
    return insertIndex;
}
1933 template<
typename Iterator,
typename Compare>
1934 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1938 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1939 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1940 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1944 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1946 #endif // #ifndef VMA_SORT 1955 static inline bool VmaBlocksOnSamePage(
1956 VkDeviceSize resourceAOffset,
1957 VkDeviceSize resourceASize,
1958 VkDeviceSize resourceBOffset,
1959 VkDeviceSize pageSize)
1961 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1962 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1963 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1964 VkDeviceSize resourceBStart = resourceBOffset;
1965 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1966 return resourceAEndPage == resourceBStartPage;
// Category of the data stored in a suballocation; used by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations may share a bufferImageGranularity page. Values are ordered
// so that the pairwise check can normalize to type1 <= type2.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // contents unknown — treated as conflicting with everything
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with tiling not yet known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1986 static inline bool VmaIsBufferImageGranularityConflict(
1987 VmaSuballocationType suballocType1,
1988 VmaSuballocationType suballocType2)
1990 if(suballocType1 > suballocType2)
1992 VMA_SWAP(suballocType1, suballocType2);
1995 switch(suballocType1)
1997 case VMA_SUBALLOCATION_TYPE_FREE:
1999 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2001 case VMA_SUBALLOCATION_TYPE_BUFFER:
2003 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2004 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2005 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2007 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2008 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2009 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2010 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2012 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2013 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2025 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2026 m_pMutex(useMutex ? &mutex : VMA_NULL)
2043 VMA_MUTEX* m_pMutex;
2046 #if VMA_DEBUG_GLOBAL_MUTEX 2047 static VMA_MUTEX gDebugGlobalMutex;
2048 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2050 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2054 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
2065 template <
typename IterT,
typename KeyT,
typename CmpT>
2066 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpT cmp)
2068 size_t down = 0, up = (end - beg);
2071 const size_t mid = (down + up) / 2;
2072 if(cmp(*(beg+mid), key))
2087 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2089 if((pAllocationCallbacks != VMA_NULL) &&
2090 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2092 return (*pAllocationCallbacks->pfnAllocation)(
2093 pAllocationCallbacks->pUserData,
2096 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2100 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2104 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2106 if((pAllocationCallbacks != VMA_NULL) &&
2107 (pAllocationCallbacks->pfnFree != VMA_NULL))
2109 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2113 VMA_SYSTEM_FREE(ptr);
// Typed single-object allocation: raw storage from VmaMalloc, sized and
// aligned for T. Does NOT run T's constructor — pair with vma_new/vma_delete.
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks,
        sizeof(T), VMA_ALIGN_OF(T));
}
2123 template<
typename T>
2124 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2126 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2129 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2131 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2133 template<
typename T>
2134 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2137 VmaFree(pAllocationCallbacks, ptr);
2140 template<
typename T>
2141 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2145 for(
size_t i = count; i--; )
2149 VmaFree(pAllocationCallbacks, ptr);
2154 template<
typename T>
2155 class VmaStlAllocator
2158 const VkAllocationCallbacks*
const m_pCallbacks;
2159 typedef T value_type;
2161 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2162 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2164 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2165 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2167 template<
typename U>
2168 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2170 return m_pCallbacks == rhs.m_pCallbacks;
2172 template<
typename U>
2173 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2175 return m_pCallbacks != rhs.m_pCallbacks;
2178 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

// Inserts item at position index of a std::vector (shim so the same call
// works for both std::vector and the custom VmaVector below).
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// Removes the element at position index from a std::vector (shim matching
// the custom VmaVector's remove()).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
2197 #else // #if VMA_USE_STL_VECTOR 2202 template<
typename T,
typename AllocatorT>
2206 typedef T value_type;
2208 VmaVector(
const AllocatorT& allocator) :
2209 m_Allocator(allocator),
2216 VmaVector(
size_t count,
const AllocatorT& allocator) :
2217 m_Allocator(allocator),
2218 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2224 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2225 m_Allocator(src.m_Allocator),
2226 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2227 m_Count(src.m_Count),
2228 m_Capacity(src.m_Count)
2232 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2238 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2241 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2245 resize(rhs.m_Count);
2248 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2254 bool empty()
const {
return m_Count == 0; }
2255 size_t size()
const {
return m_Count; }
2256 T* data() {
return m_pArray; }
2257 const T* data()
const {
return m_pArray; }
2259 T& operator[](
size_t index)
2261 VMA_HEAVY_ASSERT(index < m_Count);
2262 return m_pArray[index];
2264 const T& operator[](
size_t index)
const 2266 VMA_HEAVY_ASSERT(index < m_Count);
2267 return m_pArray[index];
2272 VMA_HEAVY_ASSERT(m_Count > 0);
2275 const T& front()
const 2277 VMA_HEAVY_ASSERT(m_Count > 0);
2282 VMA_HEAVY_ASSERT(m_Count > 0);
2283 return m_pArray[m_Count - 1];
2285 const T& back()
const 2287 VMA_HEAVY_ASSERT(m_Count > 0);
2288 return m_pArray[m_Count - 1];
2291 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2293 newCapacity = VMA_MAX(newCapacity, m_Count);
2295 if((newCapacity < m_Capacity) && !freeMemory)
2297 newCapacity = m_Capacity;
2300 if(newCapacity != m_Capacity)
2302 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2305 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2307 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2308 m_Capacity = newCapacity;
2309 m_pArray = newArray;
2313 void resize(
size_t newCount,
bool freeMemory =
false)
2315 size_t newCapacity = m_Capacity;
2316 if(newCount > m_Capacity)
2318 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2322 newCapacity = newCount;
2325 if(newCapacity != m_Capacity)
2327 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2328 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2329 if(elementsToCopy != 0)
2331 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2333 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2334 m_Capacity = newCapacity;
2335 m_pArray = newArray;
2341 void clear(
bool freeMemory =
false)
2343 resize(0, freeMemory);
2346 void insert(
size_t index,
const T& src)
2348 VMA_HEAVY_ASSERT(index <= m_Count);
2349 const size_t oldCount = size();
2350 resize(oldCount + 1);
2351 if(index < oldCount)
2353 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2355 m_pArray[index] = src;
2358 void remove(
size_t index)
2360 VMA_HEAVY_ASSERT(index < m_Count);
2361 const size_t oldCount = size();
2362 if(index < oldCount - 1)
2364 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2366 resize(oldCount - 1);
2369 void push_back(
const T& src)
2371 const size_t newIndex = size();
2372 resize(newIndex + 1);
2373 m_pArray[newIndex] = src;
2378 VMA_HEAVY_ASSERT(m_Count > 0);
2382 void push_front(
const T& src)
2389 VMA_HEAVY_ASSERT(m_Count > 0);
2393 typedef T* iterator;
2395 iterator begin() {
return m_pArray; }
2396 iterator end() {
return m_pArray + m_Count; }
2399 AllocatorT m_Allocator;
// Inserts item at position index of the custom VmaVector (shim matching the
// std::vector overload above).
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}
2411 template<
typename T,
typename allocatorT>
2412 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2417 #endif // #if VMA_USE_STL_VECTOR 2419 template<
typename CmpLess,
typename VectorT>
2420 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2422 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2424 vector.data() + vector.size(),
2426 CmpLess()) - vector.data();
2427 VmaVectorInsert(vector, indexToInsert, value);
2428 return indexToInsert;
2431 template<
typename CmpLess,
typename VectorT>
2432 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2435 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2440 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2442 size_t indexToRemove = it - vector.begin();
2443 VmaVectorRemove(vector, indexToRemove);
2449 template<
typename CmpLess,
typename VectorT>
2450 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2453 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2455 vector.data() + vector.size(),
2458 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2460 return it - vector.begin();
2464 return vector.size();
2476 template<
typename T>
2477 class VmaPoolAllocator
2480 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2481 ~VmaPoolAllocator();
2489 uint32_t NextFreeIndex;
2496 uint32_t FirstFreeIndex;
2499 const VkAllocationCallbacks* m_pAllocationCallbacks;
2500 size_t m_ItemsPerBlock;
2501 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2503 ItemBlock& CreateNewBlock();
// Constructs an empty pool allocator; blocks of itemsPerBlock items are
// created lazily on the first Alloc(). itemsPerBlock must be positive.
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}
2515 template<
typename T>
2516 VmaPoolAllocator<T>::~VmaPoolAllocator()
2521 template<
typename T>
2522 void VmaPoolAllocator<T>::Clear()
2524 for(
size_t i = m_ItemBlocks.size(); i--; )
2525 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2526 m_ItemBlocks.clear();
// Returns a pointer to an uninitialized T slot. Scans existing blocks
// (newest first) for a free slot via each block's intrusive free list;
// creates a fresh block when all are full. O(#blocks) per call, O(1) within
// a block.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // UINT32_MAX marks a block whose free list is exhausted.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex; // pop free-list head
            return &pItem->Value;
        }
    }
    // No block has a free item: create a new one and take its first slot.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
2551 template<
typename T>
2552 void VmaPoolAllocator<T>::Free(T* ptr)
2555 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2557 ItemBlock& block = m_ItemBlocks[i];
2561 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2564 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2566 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2567 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2568 block.FirstFreeIndex = index;
2572 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
// Appends a new block of m_ItemsPerBlock items and threads all of them onto
// the block's free list: slot i points to i+1, the last slot holds the
// UINT32_MAX terminator. FirstFreeIndex starts at 0 (aggregate init).
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    // (Writing through newBlock.pItems is fine: the vector's copy shares the
    // same pItems array.)
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
2593 #if VMA_USE_STL_LIST 2595 #define VmaList std::list 2597 #else // #if VMA_USE_STL_LIST 2599 template<
typename T>
2608 template<
typename T>
2612 typedef VmaListItem<T> ItemType;
2614 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2618 size_t GetCount()
const {
return m_Count; }
2619 bool IsEmpty()
const {
return m_Count == 0; }
2621 ItemType* Front() {
return m_pFront; }
2622 const ItemType* Front()
const {
return m_pFront; }
2623 ItemType* Back() {
return m_pBack; }
2624 const ItemType* Back()
const {
return m_pBack; }
2626 ItemType* PushBack();
2627 ItemType* PushFront();
2628 ItemType* PushBack(
const T& value);
2629 ItemType* PushFront(
const T& value);
2634 ItemType* InsertBefore(ItemType* pItem);
2636 ItemType* InsertAfter(ItemType* pItem);
2638 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2639 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2641 void Remove(ItemType* pItem);
2644 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2645 VmaPoolAllocator<ItemType> m_ItemAllocator;
2651 VmaRawList(
const VmaRawList<T>& src);
2652 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2655 template<
typename T>
2656 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2657 m_pAllocationCallbacks(pAllocationCallbacks),
2658 m_ItemAllocator(pAllocationCallbacks, 128),
2665 template<
typename T>
2666 VmaRawList<T>::~VmaRawList()
2672 template<
typename T>
2673 void VmaRawList<T>::Clear()
2675 if(IsEmpty() ==
false)
2677 ItemType* pItem = m_pBack;
2678 while(pItem != VMA_NULL)
2680 ItemType*
const pPrevItem = pItem->pPrev;
2681 m_ItemAllocator.Free(pItem);
2684 m_pFront = VMA_NULL;
2690 template<
typename T>
2691 VmaListItem<T>* VmaRawList<T>::PushBack()
2693 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2694 pNewItem->pNext = VMA_NULL;
2697 pNewItem->pPrev = VMA_NULL;
2698 m_pFront = pNewItem;
2704 pNewItem->pPrev = m_pBack;
2705 m_pBack->pNext = pNewItem;
2712 template<
typename T>
2713 VmaListItem<T>* VmaRawList<T>::PushFront()
2715 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2716 pNewItem->pPrev = VMA_NULL;
2719 pNewItem->pNext = VMA_NULL;
2720 m_pFront = pNewItem;
2726 pNewItem->pNext = m_pFront;
2727 m_pFront->pPrev = pNewItem;
2728 m_pFront = pNewItem;
2734 template<
typename T>
2735 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2737 ItemType*
const pNewItem = PushBack();
2738 pNewItem->Value = value;
2742 template<
typename T>
2743 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2745 ItemType*
const pNewItem = PushFront();
2746 pNewItem->Value = value;
2750 template<
typename T>
2751 void VmaRawList<T>::PopBack()
2753 VMA_HEAVY_ASSERT(m_Count > 0);
2754 ItemType*
const pBackItem = m_pBack;
2755 ItemType*
const pPrevItem = pBackItem->pPrev;
2756 if(pPrevItem != VMA_NULL)
2758 pPrevItem->pNext = VMA_NULL;
2760 m_pBack = pPrevItem;
2761 m_ItemAllocator.Free(pBackItem);
2765 template<
typename T>
2766 void VmaRawList<T>::PopFront()
2768 VMA_HEAVY_ASSERT(m_Count > 0);
2769 ItemType*
const pFrontItem = m_pFront;
2770 ItemType*
const pNextItem = pFrontItem->pNext;
2771 if(pNextItem != VMA_NULL)
2773 pNextItem->pPrev = VMA_NULL;
2775 m_pFront = pNextItem;
2776 m_ItemAllocator.Free(pFrontItem);
2780 template<
typename T>
2781 void VmaRawList<T>::Remove(ItemType* pItem)
2783 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2784 VMA_HEAVY_ASSERT(m_Count > 0);
2786 if(pItem->pPrev != VMA_NULL)
2788 pItem->pPrev->pNext = pItem->pNext;
2792 VMA_HEAVY_ASSERT(m_pFront == pItem);
2793 m_pFront = pItem->pNext;
2796 if(pItem->pNext != VMA_NULL)
2798 pItem->pNext->pPrev = pItem->pPrev;
2802 VMA_HEAVY_ASSERT(m_pBack == pItem);
2803 m_pBack = pItem->pPrev;
2806 m_ItemAllocator.Free(pItem);
2810 template<
typename T>
2811 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2813 if(pItem != VMA_NULL)
2815 ItemType*
const prevItem = pItem->pPrev;
2816 ItemType*
const newItem = m_ItemAllocator.Alloc();
2817 newItem->pPrev = prevItem;
2818 newItem->pNext = pItem;
2819 pItem->pPrev = newItem;
2820 if(prevItem != VMA_NULL)
2822 prevItem->pNext = newItem;
2826 VMA_HEAVY_ASSERT(m_pFront == pItem);
2836 template<
typename T>
2837 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2839 if(pItem != VMA_NULL)
2841 ItemType*
const nextItem = pItem->pNext;
2842 ItemType*
const newItem = m_ItemAllocator.Alloc();
2843 newItem->pNext = nextItem;
2844 newItem->pPrev = pItem;
2845 pItem->pNext = newItem;
2846 if(nextItem != VMA_NULL)
2848 nextItem->pPrev = newItem;
2852 VMA_HEAVY_ASSERT(m_pBack == pItem);
2862 template<
typename T>
2863 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2865 ItemType*
const newItem = InsertBefore(pItem);
2866 newItem->Value = value;
2870 template<
typename T>
2871 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2873 ItemType*
const newItem = InsertAfter(pItem);
2874 newItem->Value = value;
2878 template<
typename T,
typename AllocatorT>
2891 T& operator*()
const 2893 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2894 return m_pItem->Value;
2896 T* operator->()
const 2898 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2899 return &m_pItem->Value;
2902 iterator& operator++()
2904 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2905 m_pItem = m_pItem->pNext;
2908 iterator& operator--()
2910 if(m_pItem != VMA_NULL)
2912 m_pItem = m_pItem->pPrev;
2916 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2917 m_pItem = m_pList->Back();
2922 iterator operator++(
int)
2924 iterator result = *
this;
2928 iterator operator--(
int)
2930 iterator result = *
this;
2935 bool operator==(
const iterator& rhs)
const 2937 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2938 return m_pItem == rhs.m_pItem;
2940 bool operator!=(
const iterator& rhs)
const 2942 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2943 return m_pItem != rhs.m_pItem;
2947 VmaRawList<T>* m_pList;
2948 VmaListItem<T>* m_pItem;
2950 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2956 friend class VmaList<T, AllocatorT>;
2959 class const_iterator
2968 const_iterator(
const iterator& src) :
2969 m_pList(src.m_pList),
2970 m_pItem(src.m_pItem)
2974 const T& operator*()
const 2976 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2977 return m_pItem->Value;
2979 const T* operator->()
const 2981 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2982 return &m_pItem->Value;
2985 const_iterator& operator++()
2987 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2988 m_pItem = m_pItem->pNext;
2991 const_iterator& operator--()
2993 if(m_pItem != VMA_NULL)
2995 m_pItem = m_pItem->pPrev;
2999 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3000 m_pItem = m_pList->Back();
3005 const_iterator operator++(
int)
3007 const_iterator result = *
this;
3011 const_iterator operator--(
int)
3013 const_iterator result = *
this;
3018 bool operator==(
const const_iterator& rhs)
const 3020 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3021 return m_pItem == rhs.m_pItem;
3023 bool operator!=(
const const_iterator& rhs)
const 3025 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3026 return m_pItem != rhs.m_pItem;
3030 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3036 const VmaRawList<T>* m_pList;
3037 const VmaListItem<T>* m_pItem;
3039 friend class VmaList<T, AllocatorT>;
3042 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3044 bool empty()
const {
return m_RawList.IsEmpty(); }
3045 size_t size()
const {
return m_RawList.GetCount(); }
3047 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3048 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3050 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3051 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3053 void clear() { m_RawList.Clear(); }
3054 void push_back(
const T& value) { m_RawList.PushBack(value); }
3055 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3056 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3059 VmaRawList<T> m_RawList;
3062 #endif // #if VMA_USE_STL_LIST 3070 #if VMA_USE_STL_UNORDERED_MAP 3072 #define VmaPair std::pair 3074 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3075 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3077 #else // #if VMA_USE_STL_UNORDERED_MAP 3079 template<
typename T1,
typename T2>
3085 VmaPair() : first(), second() { }
3086 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3092 template<
typename KeyT,
typename ValueT>
3096 typedef VmaPair<KeyT, ValueT> PairType;
3097 typedef PairType* iterator;
3099 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3101 iterator begin() {
return m_Vector.begin(); }
3102 iterator end() {
return m_Vector.end(); }
3104 void insert(
const PairType& pair);
3105 iterator find(
const KeyT& key);
3106 void erase(iterator it);
3109 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3112 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3114 template<
typename FirstT,
typename SecondT>
3115 struct VmaPairFirstLess
3117 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3119 return lhs.first < rhs.first;
3121 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3123 return lhs.first < rhsFirst;
3127 template<
typename KeyT,
typename ValueT>
3128 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3130 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3132 m_Vector.data() + m_Vector.size(),
3134 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3135 VmaVectorInsert(m_Vector, indexToInsert, pair);
3138 template<
typename KeyT,
typename ValueT>
3139 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3141 PairType* it = VmaBinaryFindFirstNotLess(
3143 m_Vector.data() + m_Vector.size(),
3145 VmaPairFirstLess<KeyT, ValueT>());
3146 if((it != m_Vector.end()) && (it->first == key))
3152 return m_Vector.end();
// Removes the pair that it points to; it must be a valid iterator into this
// map's backing vector (e.g. the result of a successful find()).
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
3162 #endif // #if VMA_USE_STL_UNORDERED_MAP 3168 class VmaDeviceMemoryBlock;
3170 struct VmaAllocation_T
3173 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3177 FLAG_USER_DATA_STRING = 0x01,
3181 enum ALLOCATION_TYPE
3183 ALLOCATION_TYPE_NONE,
3184 ALLOCATION_TYPE_BLOCK,
3185 ALLOCATION_TYPE_DEDICATED,
3188 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3191 m_pUserData(VMA_NULL),
3192 m_LastUseFrameIndex(currentFrameIndex),
3193 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3194 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3196 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3202 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3205 VMA_ASSERT(m_pUserData == VMA_NULL);
3208 void InitBlockAllocation(
3210 VmaDeviceMemoryBlock* block,
3211 VkDeviceSize offset,
3212 VkDeviceSize alignment,
3214 VmaSuballocationType suballocationType,
3218 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3219 VMA_ASSERT(block != VMA_NULL);
3220 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3221 m_Alignment = alignment;
3223 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3224 m_SuballocationType = (uint8_t)suballocationType;
3225 m_BlockAllocation.m_hPool = hPool;
3226 m_BlockAllocation.m_Block = block;
3227 m_BlockAllocation.m_Offset = offset;
3228 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3233 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3234 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3235 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3236 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3237 m_BlockAllocation.m_Block = VMA_NULL;
3238 m_BlockAllocation.m_Offset = 0;
3239 m_BlockAllocation.m_CanBecomeLost =
true;
3242 void ChangeBlockAllocation(
3243 VmaDeviceMemoryBlock* block,
3244 VkDeviceSize offset)
3246 VMA_ASSERT(block != VMA_NULL);
3247 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3248 m_BlockAllocation.m_Block = block;
3249 m_BlockAllocation.m_Offset = offset;
3253 void InitDedicatedAllocation(
3254 uint32_t memoryTypeIndex,
3255 VkDeviceMemory hMemory,
3256 VmaSuballocationType suballocationType,
3260 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3261 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3262 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3265 m_SuballocationType = (uint8_t)suballocationType;
3266 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3267 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3268 m_DedicatedAllocation.m_hMemory = hMemory;
3269 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- VmaAllocation_T: inline accessors and method declarations ---
// Simple getters over the members initialized by the Init* methods above.
3272 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3273 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3274 VkDeviceSize GetSize()
const {
return m_Size; }
// True when m_pUserData points at an owned, heap-copied string.
3275 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3276 void* GetUserData()
const {
return m_pUserData; }
3277 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3278 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for block allocations (asserted).
3280 VmaDeviceMemoryBlock* GetBlock()
const 3282 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3283 return m_BlockAllocation.m_Block;
3285 VkDeviceSize GetOffset()
const;
3286 VkDeviceMemory GetMemory()
const;
3287 uint32_t GetMemoryTypeIndex()
const;
3288 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3289 void* GetMappedData()
const;
3290 bool CanBecomeLost()
const;
3291 VmaPool GetPool()
const;
// Lost-allocation bookkeeping: last-use frame index is atomic because it is
// touched from multiple threads (see compare_exchange below).
3293 uint32_t GetLastUseFrameIndex()
const 3295 return m_LastUseFrameIndex.load();
3297 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3299 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3309 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3311 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3313 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers, split by allocation type.
3324 void BlockAllocMap();
3325 void BlockAllocUnmap();
3326 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3327 void DedicatedAllocUnmap(VmaAllocator hAllocator);
// --- VmaAllocation_T: data members ---
3330 VkDeviceSize m_Alignment;
3331 VkDeviceSize m_Size;
3333 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3335 uint8_t m_SuballocationType;
// State for an allocation placed inside a VmaDeviceMemoryBlock.
3342 struct BlockAllocation
3345 VmaDeviceMemoryBlock* m_Block;
3346 VkDeviceSize m_Offset;
3347 bool m_CanBecomeLost;
// State for an allocation with its own dedicated VkDeviceMemory.
3351 struct DedicatedAllocation
3353 uint32_t m_MemoryTypeIndex;
3354 VkDeviceMemory m_hMemory;
3355 void* m_pMappedData;
3361 BlockAllocation m_BlockAllocation;
3363 DedicatedAllocation m_DedicatedAllocation;
3366 void FreeUserDataString(VmaAllocator hAllocator);
// One contiguous region inside a memory block: either free
// (type == VMA_SUBALLOCATION_TYPE_FREE, hAllocation == VK_NULL_HANDLE)
// or owned by exactly one VmaAllocation.
3373 struct VmaSuballocation
3375 VkDeviceSize offset;
3377 VmaAllocation hAllocation;
3378 VmaSuballocationType type;
3381 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Artificial cost (in bytes) charged per allocation that must be made lost;
// used by CalcCost() to compare candidate allocation spots.
3384 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of a search for a place to put a new suballocation.
3399 struct VmaAllocationRequest
3401 VkDeviceSize offset;
3402 VkDeviceSize sumFreeSize;
3403 VkDeviceSize sumItemSize;
3404 VmaSuballocationList::iterator item;
3405 size_t itemsToMakeLostCount;
// Lower cost == better spot: bytes of live allocations sacrificed plus a
// fixed penalty per allocation made lost.
3407 VkDeviceSize CalcCost()
const 3409 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the suballocations inside a single VkDeviceMemory block:
// a list of used/free ranges plus a size-sorted index of free ranges.
// Does not own the VkDeviceMemory itself (see VmaDeviceMemoryBlock).
3417 class VmaBlockMetadata
3420 VmaBlockMetadata(VmaAllocator hAllocator);
3421 ~VmaBlockMetadata();
3422 void Init(VkDeviceSize size);
// Consistency check of all internal invariants; returns false on corruption.
3425 bool Validate()
const;
3426 VkDeviceSize GetSize()
const {
return m_Size; }
3427 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3428 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3429 VkDeviceSize GetUnusedRangeSizeMax()
const;
3431 bool IsEmpty()
const;
3433 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3436 #if VMA_STATS_STRING_ENABLED 3437 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Shortcut for a brand-new, completely empty block.
3441 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Tries to find a place for a new allocation, optionally by making other
// (lost-enabled) allocations lost. Returns true on success.
3446 bool CreateAllocationRequest(
3447 uint32_t currentFrameIndex,
3448 uint32_t frameInUseCount,
3449 VkDeviceSize bufferImageGranularity,
3450 VkDeviceSize allocSize,
3451 VkDeviceSize allocAlignment,
3452 VmaSuballocationType allocType,
3453 bool canMakeOtherLost,
3454 VmaAllocationRequest* pAllocationRequest);
3456 bool MakeRequestedAllocationsLost(
3457 uint32_t currentFrameIndex,
3458 uint32_t frameInUseCount,
3459 VmaAllocationRequest* pAllocationRequest);
3461 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3465 const VmaAllocationRequest& request,
3466 VmaSuballocationType type,
3467 VkDeviceSize allocSize,
3468 VmaAllocation hAllocation);
3471 void Free(
const VmaAllocation allocation);
3474 VkDeviceSize m_Size;
3475 uint32_t m_FreeCount;
3476 VkDeviceSize m_SumFreeSize;
3477 VmaSuballocationList m_Suballocations;
// Free suballocations above the registration threshold, sorted by size
// ascending, enabling binary search for best fit.
3480 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3482 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation can be placed at/after suballocItem.
3486 bool CheckAllocation(
3487 uint32_t currentFrameIndex,
3488 uint32_t frameInUseCount,
3489 VkDeviceSize bufferImageGranularity,
3490 VkDeviceSize allocSize,
3491 VkDeviceSize allocAlignment,
3492 VmaSuballocationType allocType,
3493 VmaSuballocationList::const_iterator suballocItem,
3494 bool canMakeOtherLost,
3495 VkDeviceSize* pOffset,
3496 size_t* itemsToMakeLostCount,
3497 VkDeviceSize* pSumFreeSize,
3498 VkDeviceSize* pSumItemSize)
const;
3500 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3504 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3507 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3510 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Reference-counted vkMapMemory wrapper for one VkDeviceMemory: the memory is
// mapped on first Map() and unmapped when the count returns to zero.
3514 class VmaDeviceMemoryMapping
3517 VmaDeviceMemoryMapping();
3518 ~VmaDeviceMemoryMapping();
3520 void* GetMappedData()
const {
return m_pMappedData; }
3523 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData);
3524 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
3528 uint32_t m_MapCount;
3529 void* m_pMappedData;
// One VkDeviceMemory object carved into suballocations: owns the handle,
// its mapping state, and the metadata describing used/free ranges.
3538 class VmaDeviceMemoryBlock
3541 uint32_t m_MemoryTypeIndex;
3542 VkDeviceMemory m_hMemory;
3543 VmaDeviceMemoryMapping m_Mapping;
3544 VmaBlockMetadata m_Metadata;
3546 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destroy() must have been called (it releases m_hMemory) before destruction.
3548 ~VmaDeviceMemoryBlock()
3550 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3555 uint32_t newMemoryTypeIndex,
3556 VkDeviceMemory newMemory,
3557 VkDeviceSize newSize);
3559 void Destroy(VmaAllocator allocator);
3562 bool Validate()
const;
3565 VkResult Map(VmaAllocator hAllocator,
void** ppData);
3566 void Unmap(VmaAllocator hAllocator);
// Comparator ordering raw pointers by address (for sorted pointer containers).
3569 struct VmaPointerLess
3571 bool operator()(
const void* lhs,
const void* rhs)
const 3577 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock for one memory type — the core
// of both default per-type storage and custom pools (m_IsCustomPool).
3585 struct VmaBlockVector
3588 VmaAllocator hAllocator,
3589 uint32_t memoryTypeIndex,
3590 VkDeviceSize preferredBlockSize,
3591 size_t minBlockCount,
3592 size_t maxBlockCount,
3593 VkDeviceSize bufferImageGranularity,
3594 uint32_t frameInUseCount,
3598 VkResult CreateMinBlocks();
3600 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3601 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3602 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3603 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3607 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate from an existing block or create a new one (up to m_MaxBlockCount).
3610 VmaPool hCurrentPool,
3611 uint32_t currentFrameIndex,
3612 const VkMemoryRequirements& vkMemReq,
3614 VmaSuballocationType suballocType,
3615 VmaAllocation* pAllocation);
3618 VmaAllocation hAllocation);
3623 #if VMA_STATS_STRING_ENABLED 3624 void PrintDetailedMap(
class VmaJsonWriter& json);
3627 void MakePoolAllocationsLost(
3628 uint32_t currentFrameIndex,
3629 size_t* pLostAllocationCount);
// Lazily creates m_pDefragmentator for the given frame.
3631 VmaDefragmentator* EnsureDefragmentator(
3632 VmaAllocator hAllocator,
3633 uint32_t currentFrameIndex);
3635 VkResult Defragment(
3637 VkDeviceSize& maxBytesToMove,
3638 uint32_t& maxAllocationsToMove);
3640 void DestroyDefragmentator();
3643 friend class VmaDefragmentator;
3645 const VmaAllocator m_hAllocator;
3646 const uint32_t m_MemoryTypeIndex;
3647 const VkDeviceSize m_PreferredBlockSize;
3648 const size_t m_MinBlockCount;
3649 const size_t m_MaxBlockCount;
3650 const VkDeviceSize m_BufferImageGranularity;
3651 const uint32_t m_FrameInUseCount;
3652 const bool m_IsCustomPool;
3655 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True when one of m_Blocks is known to be completely empty (kept around to
// avoid churn of destroying and recreating blocks).
3659 bool m_HasEmptyBlock;
3660 VmaDefragmentator* m_pDefragmentator;
3662 size_t CalcMaxBlockSize()
const;
3665 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted without a full sort on every change.
3669 void IncrementallySortBlocks();
3671 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Fragment of a pool class (class header dropped by the extraction — the
// enclosing type is presumably VmaPool_T; verify against upstream): a custom
// pool is a thin wrapper around its own VmaBlockVector.
3677 VmaBlockVector m_BlockVector;
3681 VmaAllocator hAllocator,
3685 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Performs defragmentation of one VmaBlockVector: moves selected allocations
// between blocks to compact memory, bounded by maxBytesToMove /
// maxAllocationsToMove.
3687 #if VMA_STATS_STRING_ENABLED 3692 class VmaDefragmentator
3694 const VmaAllocator m_hAllocator;
3695 VmaBlockVector*
const m_pBlockVector;
3696 uint32_t m_CurrentFrameIndex;
3697 VkDeviceSize m_BytesMoved;
3698 uint32_t m_AllocationsMoved;
// One allocation registered for potential movement; *m_pChanged is set if it
// actually moved.
3700 struct AllocationInfo
3702 VmaAllocation m_hAllocation;
3703 VkBool32* m_pChanged;
3706 m_hAllocation(VK_NULL_HANDLE),
3707 m_pChanged(VMA_NULL)
// Orders candidate allocations largest-first.
3712 struct AllocationInfoSizeGreater
3714 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3716 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3721 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation round.
3725 VmaDeviceMemoryBlock* m_pBlock;
3726 bool m_HasNonMovableAllocations;
3727 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3729 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3731 m_HasNonMovableAllocations(true),
3732 m_Allocations(pAllocationCallbacks),
3733 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not every allocation in it was
// registered for defragmentation.
3737 void CalcHasNonMovableAllocations()
3739 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3740 const size_t defragmentAllocCount = m_Allocations.size();
3741 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3744 void SortAllocationsBySizeDescecnding()
3746 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3749 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3750 void Unmap(VmaAllocator hAllocator);
3754 void* m_pMappedDataForDefragmentation;
// Comparators used to locate and order BlockInfo entries.
3757 struct BlockPointerLess
3759 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3761 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3763 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3765 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Orders blocks by suitability as a move destination: blocks with only
// movable allocations first, then by free size.
3771 struct BlockInfoCompareMoveDestination
3773 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3775 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3779 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3783 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3791 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3792 BlockInfoVector m_Blocks;
3794 VkResult DefragmentRound(
3795 VkDeviceSize maxBytesToMove,
3796 uint32_t maxAllocationsToMove);
// Heuristic: only move when the destination is strictly "earlier" memory.
3798 static bool MoveMakesSense(
3799 size_t dstBlockIndex, VkDeviceSize dstOffset,
3800 size_t srcBlockIndex, VkDeviceSize srcOffset);
3804 VmaAllocator hAllocator,
3805 VmaBlockVector* pBlockVector,
3806 uint32_t currentFrameIndex);
3808 ~VmaDefragmentator();
3810 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3811 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3813 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3815 VkResult Defragment(
3816 VkDeviceSize maxBytesToMove,
3817 uint32_t maxAllocationsToMove);
// The allocator object behind the public VmaAllocator handle: holds device
// properties, per-memory-type block vectors, dedicated-allocation lists and
// custom pools, and implements the allocate/free/map/defragment entry points.
3821 struct VmaAllocator_T
3824 bool m_UseKhrDedicatedAllocation;
3826 bool m_AllocationCallbacksSpecified;
3827 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size caps, guarded by their own mutex.
3831 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3832 VMA_MUTEX m_HeapSizeLimitMutex;
3834 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3835 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default (non-pool) block vectors, one slot per memory type.
3838 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each list with its own lock.
3841 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3842 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3843 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if specified, else null (Vulkan default allocator).
3848 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3850 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3854 return m_VulkanFunctions;
// Effective granularity: device limit clamped up to the debug minimum.
3857 VkDeviceSize GetBufferImageGranularity()
const 3860 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3861 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3864 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3865 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3867 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3869 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3870 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3873 void GetBufferMemoryRequirements(
3875 VkMemoryRequirements& memReq,
3876 bool& requiresDedicatedAllocation,
3877 bool& prefersDedicatedAllocation)
const;
3878 void GetImageMemoryRequirements(
3880 VkMemoryRequirements& memReq,
3881 bool& requiresDedicatedAllocation,
3882 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory* functions.
3885 VkResult AllocateMemory(
3886 const VkMemoryRequirements& vkMemReq,
3887 bool requiresDedicatedAllocation,
3888 bool prefersDedicatedAllocation,
3889 VkBuffer dedicatedBuffer,
3890 VkImage dedicatedImage,
3892 VmaSuballocationType suballocType,
3893 VmaAllocation* pAllocation);
3896 void FreeMemory(
const VmaAllocation allocation);
3898 void CalculateStats(
VmaStats* pStats);
3900 #if VMA_STATS_STRING_ENABLED 3901 void PrintDetailedMap(
class VmaJsonWriter& json);
3904 VkResult Defragment(
3905 VmaAllocation* pAllocations,
3906 size_t allocationCount,
3907 VkBool32* pAllocationsChanged,
3911 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3914 void DestroyPool(VmaPool pool);
3915 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3917 void SetCurrentFrameIndex(uint32_t frameIndex);
3919 void MakePoolAllocationsLost(
3921 size_t* pLostAllocationCount);
3923 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that honor heap limits.
3925 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3926 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3928 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3929 void Unmap(VmaAllocation hAllocation);
3932 VkDeviceSize m_PreferredLargeHeapBlockSize;
3934 VkPhysicalDevice m_PhysicalDevice;
3935 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
3937 VMA_MUTEX m_PoolsMutex;
3939 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3945 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Tries block allocation within one memory type; may fall back to dedicated.
3947 VkResult AllocateMemoryOfType(
3948 const VkMemoryRequirements& vkMemReq,
3949 bool dedicatedAllocation,
3950 VkBuffer dedicatedBuffer,
3951 VkImage dedicatedImage,
3953 uint32_t memTypeIndex,
3954 VmaSuballocationType suballocType,
3955 VmaAllocation* pAllocation);
3958 VkResult AllocateDedicatedMemory(
3960 VmaSuballocationType suballocType,
3961 uint32_t memTypeIndex,
3963 bool isUserDataString,
3965 VkBuffer dedicatedBuffer,
3966 VkImage dedicatedImage,
3967 VmaAllocation* pAllocation);
3970 void FreeDedicatedMemory(VmaAllocation allocation);
// Allocator-level malloc: forwards to the callback-based VmaMalloc overload
// using this allocator's VkAllocationCallbacks.
3976 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3978 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
// Allocator-level free: forwards to the callback-based VmaFree overload.
3981 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3983 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Allocates raw, suitably-aligned storage for one T (no constructor call here;
// construction is presumably done by the caller — verify against upstream).
3986 template<
typename T>
3987 static T* VmaAllocate(VmaAllocator hAllocator)
3989 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Allocates raw storage for `count` objects of type T.
3992 template<
typename T>
3993 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
3995 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys *ptr and releases its storage through the allocator's callbacks.
// NOTE(review): the explicit destructor call (source line ~4002) was dropped
// by the extraction — verify against upstream.
3998 template<
typename T>
3999 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4004 VmaFree(hAllocator, ptr);
// Destroys `count` objects in reverse order, then frees the array storage.
// NOTE(review): the per-element destructor call inside the loop was dropped by
// the extraction — verify against upstream.
4008 template<
typename T>
4009 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4013 for(
size_t i = count; i--; )
4015 VmaFree(hAllocator, ptr);
// Minimal growable character buffer used to build the JSON stats string;
// the data is NOT NUL-terminated (length comes from GetLength()).
4022 #if VMA_STATS_STRING_ENABLED 4024 class VmaStringBuilder
4027 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4028 size_t GetLength()
const {
return m_Data.size(); }
4029 const char* GetData()
const {
return m_Data.data(); }
4031 void Add(
char ch) { m_Data.push_back(ch); }
4032 void Add(
const char* pStr);
4033 void AddNewLine() { Add(
'\n'); }
4034 void AddNumber(uint32_t num);
4035 void AddNumber(uint64_t num);
4036 void AddPointer(
const void* ptr);
4039 VmaVector< char, VmaStlAllocator<char> > m_Data;
4042 void VmaStringBuilder::Add(
const char* pStr)
4044 const size_t strLen = strlen(pStr);
4047 const size_t oldCount = m_Data.size();
4048 m_Data.resize(oldCount + strLen);
4049 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Formats a 32-bit number into a local buffer via VmaUint32ToStr.
// NOTE(review): the buffer declaration and the final Add(buf) call were
// dropped by the extraction — verify against upstream.
4053 void VmaStringBuilder::AddNumber(uint32_t num)
4056 VmaUint32ToStr(buf,
sizeof(buf), num);
// 64-bit overload of AddNumber; same truncated shape as the 32-bit version.
4060 void VmaStringBuilder::AddNumber(uint64_t num)
4063 VmaUint64ToStr(buf,
sizeof(buf), num);
// Formats a pointer value into a local buffer via VmaPtrToStr.
4067 void VmaStringBuilder::AddPointer(
const void* ptr)
4070 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder: a stack of open
// objects/arrays tracks nesting, comma placement, and indentation.
4074 #endif // #if VMA_STATS_STRING_ENABLED 4079 #if VMA_STATS_STRING_ENABLED 4084 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4087 void BeginObject(
bool singleLine =
false);
4090 void BeginArray(
bool singleLine =
false);
// WriteString emits a complete quoted string; Begin/Continue/EndString allow
// composing one string value from multiple pieces.
4093 void WriteString(
const char* pStr);
4094 void BeginString(
const char* pStr = VMA_NULL);
4095 void ContinueString(
const char* pStr);
4096 void ContinueString(uint32_t n);
4097 void ContinueString(uint64_t n);
4098 void ContinueString_Pointer(
const void* ptr);
4099 void EndString(
const char* pStr = VMA_NULL);
4101 void WriteNumber(uint32_t n);
4102 void WriteNumber(uint64_t n);
4103 void WriteBool(
bool b);
4107 static const char*
const INDENT;
// Each stack entry records whether we are inside an object or array, how many
// values were written (for comma/key-value alternation), and line mode.
4109 enum COLLECTION_TYPE
4111 COLLECTION_TYPE_OBJECT,
4112 COLLECTION_TYPE_ARRAY,
4116 COLLECTION_TYPE type;
4117 uint32_t valueCount;
4118 bool singleLineMode;
4121 VmaStringBuilder& m_SB;
4122 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4123 bool m_InsideString;
4125 void BeginValue(
bool isString);
4126 void WriteIndent(
bool oneLess =
false);
4129 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: binds the output string builder and starts outside any string.
4131 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4133 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4134 m_InsideString(false)
// Destructor: the document must be complete — no open string, no open scopes.
4138 VmaJsonWriter::~VmaJsonWriter()
4140 VMA_ASSERT(!m_InsideString);
4141 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object ("{") and pushes a stack entry tracking its state.
4144 void VmaJsonWriter::BeginObject(
bool singleLine)
4146 VMA_ASSERT(!m_InsideString);
4152 item.type = COLLECTION_TYPE_OBJECT;
4153 item.valueCount = 0;
4154 item.singleLineMode = singleLine;
4155 m_Stack.push_back(item);
// Closes the current JSON object ("}"); top of stack must be an object.
4158 void VmaJsonWriter::EndObject()
4160 VMA_ASSERT(!m_InsideString);
4165 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array ("[") and pushes a stack entry tracking its state.
4169 void VmaJsonWriter::BeginArray(
bool singleLine)
4171 VMA_ASSERT(!m_InsideString);
4177 item.type = COLLECTION_TYPE_ARRAY;
4178 item.valueCount = 0;
4179 item.singleLineMode = singleLine;
4180 m_Stack.push_back(item);
// Closes the current JSON array ("]"); top of stack must be an array.
4183 void VmaJsonWriter::EndArray()
4185 VMA_ASSERT(!m_InsideString);
4190 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Writes a complete quoted string value (body dropped by the extraction;
// presumably BeginString(pStr); EndString(); — verify against upstream).
4194 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a string value: emits the opening quote, enters "inside string"
// mode, and optionally writes an initial fragment.
4200 void VmaJsonWriter::BeginString(
const char* pStr)
4202 VMA_ASSERT(!m_InsideString);
4206 m_InsideString =
true;
4207 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4209 ContinueString(pStr);
// Appends characters to the string being built, escaping as needed.
// NOTE(review): the escaping switch body (source lines ~4219-4250) was
// dropped by the extraction; only the unsupported-character assert remains.
4213 void VmaJsonWriter::ContinueString(
const char* pStr)
4215 VMA_ASSERT(m_InsideString);
4217 const size_t strLen = strlen(pStr);
4218 for(
size_t i = 0; i < strLen; ++i)
4251 VMA_ASSERT(0 &&
"Character not currently supported.");
// Appends a 32-bit number to the string currently being built.
4257 void VmaJsonWriter::ContinueString(uint32_t n)
4259 VMA_ASSERT(m_InsideString);
// Appends a 64-bit number to the string currently being built.
4263 void VmaJsonWriter::ContinueString(uint64_t n)
4265 VMA_ASSERT(m_InsideString);
// Appends a formatted pointer value to the string currently being built.
4269 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4271 VMA_ASSERT(m_InsideString);
4272 m_SB.AddPointer(ptr);
// Finishes the string value: writes an optional final fragment, the closing
// quote, and leaves "inside string" mode.
4275 void VmaJsonWriter::EndString(
const char* pStr)
4277 VMA_ASSERT(m_InsideString);
4278 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4280 ContinueString(pStr);
4283 m_InsideString =
false;
// Writes a 32-bit number as a standalone JSON value.
4286 void VmaJsonWriter::WriteNumber(uint32_t n)
4288 VMA_ASSERT(!m_InsideString);
// Writes a 64-bit number as a standalone JSON value.
4293 void VmaJsonWriter::WriteNumber(uint64_t n)
4295 VMA_ASSERT(!m_InsideString);
// Writes the JSON literal true/false as a standalone value.
4300 void VmaJsonWriter::WriteBool(
bool b)
4302 VMA_ASSERT(!m_InsideString);
4304 m_SB.Add(b ?
"true" :
"false");
// Writes the JSON literal null as a standalone value.
4307 void VmaJsonWriter::WriteNull()
4309 VMA_ASSERT(!m_InsideString);
// Emits whatever separator the next value needs (comma, key-value colon,
// newline + indent) based on the current collection's state, and enforces
// that object keys (even-numbered entries in an object) are strings.
4314 void VmaJsonWriter::BeginValue(
bool isString)
4316 if(!m_Stack.empty())
4318 StackItem& currItem = m_Stack.back();
// Even valueCount inside an object means a key position — must be a string.
4319 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4320 currItem.valueCount % 2 == 0)
4322 VMA_ASSERT(isString);
4325 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4326 currItem.valueCount % 2 != 0)
4330 else if(currItem.valueCount > 0)
4339 ++currItem.valueCount;
// Writes a newline plus one INDENT per open scope (one fewer when closing a
// scope); skipped entirely in single-line mode.
4343 void VmaJsonWriter::WriteIndent(
bool oneLess)
4345 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4349 size_t count = m_Stack.size();
4350 if(count > 0 && oneLess)
4354 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's user data. In string mode the previous owned string
// is freed and the new one deep-copied; otherwise the raw pointer is stored.
4361 #endif // #if VMA_STATS_STRING_ENABLED 4365 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4367 if(IsUserDataString())
// Passing the currently-stored pointer back in is invalid (it is about to
// be freed below).
4369 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData)
4371 FreeUserDataString(hAllocator);
4373 if(pUserData != VMA_NULL)
4375 const char*
const newStrSrc = (
char*)pUserData;
4376 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the terminating NUL as well.
4377 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4378 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4379 m_pUserData = newStrDst;
4384 m_pUserData = pUserData;
// Offset within the owning block; dedicated allocations return 0 (their
// return statement was dropped by the extraction — verify against upstream).
4388 VkDeviceSize VmaAllocation_T::GetOffset()
const 4392 case ALLOCATION_TYPE_BLOCK:
4393 return m_BlockAllocation.m_Offset;
4394 case ALLOCATION_TYPE_DEDICATED:
// Returns the underlying VkDeviceMemory: the block's handle for block
// allocations, the owned handle for dedicated ones, VK_NULL_HANDLE otherwise.
4402 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4406 case ALLOCATION_TYPE_BLOCK:
4407 return m_BlockAllocation.m_Block->m_hMemory;
4408 case ALLOCATION_TYPE_DEDICATED:
4409 return m_DedicatedAllocation.m_hMemory;
4412 return VK_NULL_HANDLE;
// Memory type index, taken from the block for block allocations or stored
// directly for dedicated ones.
4416 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4420 case ALLOCATION_TYPE_BLOCK:
4421 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4422 case ALLOCATION_TYPE_DEDICATED:
4423 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Returns the CPU pointer for this allocation, or null when not mapped.
// Block allocations offset into the block's shared mapping; dedicated
// allocations return their own pointer.
4430 void* VmaAllocation_T::GetMappedData()
const 4434 case ALLOCATION_TYPE_BLOCK:
4437 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4438 VMA_ASSERT(pBlockData != VMA_NULL);
4439 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4446 case ALLOCATION_TYPE_DEDICATED:
// Invariant: mapped pointer present exactly when the map count is nonzero.
4447 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4448 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can become lost; dedicated ones never can (their
// return was dropped by the extraction — verify against upstream).
4455 bool VmaAllocation_T::CanBecomeLost()
const 4459 case ALLOCATION_TYPE_BLOCK:
4460 return m_BlockAllocation.m_CanBecomeLost;
4461 case ALLOCATION_TYPE_DEDICATED:
// Pool this block allocation came from (VK_NULL_HANDLE for default pools).
4469 VmaPool VmaAllocation_T::GetPool()
const 4471 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4472 return m_BlockAllocation.m_hPool;
// Attempts to mark this allocation lost. Fails when already lost or when it
// was used within the last frameInUseCount frames; otherwise atomically CAS-es
// the last-use frame index to the LOST sentinel (retrying via the outer loop,
// whose braces were dropped by the extraction).
4475 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4477 VMA_ASSERT(CanBecomeLost());
4483 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4486 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still considered in use by recent frames — cannot be made lost yet.
4491 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4497 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Releases the owned user-data string copy (if any) and clears the pointer.
4507 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4509 VMA_ASSERT(IsUserDataString());
4510 if(m_pUserData != VMA_NULL)
4512 char*
const oldStr = (
char*)m_pUserData;
4513 const size_t oldStrLen = strlen(oldStr);
// +1 matches the NUL byte included when the string was copied.
4514 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4515 m_pUserData = VMA_NULL;
// Increments this block allocation's map count; the low 7 bits hold the
// count, the top bit is the persistent-map flag, so 0x7F is the ceiling.
4519 void VmaAllocation_T::BlockAllocMap()
4521 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4523 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4529 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements this block allocation's map count; asserts on unbalanced unmap.
4533 void VmaAllocation_T::BlockAllocUnmap()
4535 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4537 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4543 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, reuses the cached pointer
// and bumps the count (up to 0x7F); otherwise calls vkMapMemory through the
// allocator's function table and caches the result.
4547 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4549 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4553 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4555 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4556 *ppData = m_DedicatedAllocation.m_pMappedData;
4562 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4563 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: call into Vulkan.
4568 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4569 hAllocator->m_hDevice,
4570 m_DedicatedAllocation.m_hMemory,
4575 if(result == VK_SUCCESS)
4577 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation; on the last unmap clears the cached pointer
// and calls vkUnmapMemory. Asserts on unbalanced unmap.
4584 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4586 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4588 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4593 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4594 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4595 hAllocator->m_hDevice,
4596 m_DedicatedAllocation.m_hMemory);
4601 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Human-readable names for VmaSuballocationType (array body dropped by the
// extraction), followed by the JSON serializer for one VmaStatInfo record:
// counts, byte totals, and min/avg/max for allocation and unused-range sizes.
4605 #if VMA_STATS_STRING_ENABLED 4608 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4617 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4621 json.WriteString(
"Blocks");
4624 json.WriteString(
"Allocations");
4627 json.WriteString(
"UnusedRanges");
4630 json.WriteString(
"UsedBytes");
4633 json.WriteString(
"UnusedBytes");
4638 json.WriteString(
"AllocationSize");
4639 json.BeginObject(
true);
4640 json.WriteString(
"Min");
4642 json.WriteString(
"Avg");
4644 json.WriteString(
"Max");
4651 json.WriteString(
"UnusedRangeSize");
4652 json.BeginObject(
true);
4653 json.WriteString(
"Min");
4655 json.WriteString(
"Avg");
4657 json.WriteString(
"Max");
// Comparator for the size-sorted free list: orders suballocation iterators by
// size, with a heterogeneous overload against a bare VkDeviceSize so
// VmaBinaryFindFirstNotLess can search without constructing an iterator.
4665 #endif // #if VMA_STATS_STRING_ENABLED 4667 struct VmaSuballocationItemSizeLess
4670 const VmaSuballocationList::iterator lhs,
4671 const VmaSuballocationList::iterator rhs)
const 4673 return lhs->size < rhs->size;
4676 const VmaSuballocationList::iterator lhs,
4677 VkDeviceSize rhsSize)
const 4679 return lhs->size < rhsSize;
// Constructor: wires both containers to the allocator's callbacks. Actual
// sizing happens later in Init(). Destructor body is empty/trimmed.
4686 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4690 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4691 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4695 VmaBlockMetadata::~VmaBlockMetadata()
// Initializes metadata for a fresh block: a single free suballocation
// covering the whole size, registered in the size-sorted free list.
4699 void VmaBlockMetadata::Init(VkDeviceSize size)
4703 m_SumFreeSize = size;
4705 VmaSuballocation suballoc = {};
4706 suballoc.offset = 0;
4707 suballoc.size = size;
4708 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4709 suballoc.hAllocation = VK_NULL_HANDLE;
4711 m_Suballocations.push_back(suballoc);
// NOTE(review): line between end() and push_back (presumably --suballocItem)
// was dropped by the extraction — verify against upstream.
4712 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4714 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check: walks the suballocation list verifying contiguous
// offsets, no two adjacent free ranges, allocation-handle/type agreement, and
// recomputed free counts/sizes; then verifies the size-sorted free list is
// free-only and non-decreasing. Returns the conjunction of all checks.
4717 bool VmaBlockMetadata::Validate()
const 4719 if(m_Suballocations.empty())
// Running totals recomputed from scratch and compared to cached members.
4725 VkDeviceSize calculatedOffset = 0;
4727 uint32_t calculatedFreeCount = 0;
4729 VkDeviceSize calculatedSumFreeSize = 0;
4732 size_t freeSuballocationsToRegister = 0;
4734 bool prevFree =
false;
4736 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4737 suballocItem != m_Suballocations.cend();
4740 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
4743 if(subAlloc.offset != calculatedOffset)
4748 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged — that is corruption.
4750 if(prevFree && currFree)
4754 prevFree = currFree;
// Free ranges have no allocation handle; used ranges must have one.
4756 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4763 calculatedSumFreeSize += subAlloc.size;
4764 ++calculatedFreeCount;
// Only free ranges at or above the threshold appear in the sorted list.
4765 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4767 ++freeSuballocationsToRegister;
4771 calculatedOffset += subAlloc.size;
4776 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The sorted free list must be free-only and sorted by size ascending.
4781 VkDeviceSize lastSize = 0;
4782 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4784 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4787 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4792 if(suballocItem->size < lastSize)
4797 lastSize = suballocItem->size;
4802 ValidateFreeSuballocationList() &&
4803 (calculatedOffset == m_Size) &&
4804 (calculatedSumFreeSize == m_SumFreeSize) &&
4805 (calculatedFreeCount == m_FreeCount);
4808 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4810 if(!m_FreeSuballocationsBySize.empty())
4812 return m_FreeSuballocationsBySize.back()->size;
4820 bool VmaBlockMetadata::IsEmpty()
const 4822 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo for this block by walking all suballocations and
// accumulating used vs. free ranges (accumulation lines dropped by the
// extraction — verify against upstream).
4825 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4829 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4841 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4842 suballocItem != m_Suballocations.cend();
4845 const VmaSuballocation& suballoc = *suballocItem;
4846 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a running VmaPoolStats.
4859 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4861 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4863 inoutStats.
size += m_Size;
// Serializes this block to JSON: summary totals followed by one object per
// suballocation (type, size, offset, and user data for used ranges).
4870 #if VMA_STATS_STRING_ENABLED 4872 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4876 json.WriteString(
"TotalBytes");
4877 json.WriteNumber(m_Size);
4879 json.WriteString(
"UnusedBytes");
4880 json.WriteNumber(m_SumFreeSize);
4882 json.WriteString(
"Allocations");
4883 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4885 json.WriteString(
"UnusedRanges");
4886 json.WriteNumber(m_FreeCount);
4888 json.WriteString(
"Suballocations");
4891 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4892 suballocItem != m_Suballocations.cend();
4893 ++suballocItem, ++i)
4895 json.BeginObject(
true);
4897 json.WriteString(
"Type");
4898 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4900 json.WriteString(
"Size");
4901 json.WriteNumber(suballocItem->size);
4903 json.WriteString(
"Offset");
4904 json.WriteNumber(suballocItem->offset);
// Used ranges also emit user data: as a string when the allocation owns a
// string copy, otherwise as a pointer value.
4906 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4908 const void* pUserData = suballocItem->hAllocation->GetUserData();
4909 if(pUserData != VMA_NULL)
4911 json.WriteString(
"UserData");
4912 if(suballocItem->hAllocation->IsUserDataString())
4914 json.WriteString((
const char*)pUserData);
4919 json.ContinueString_Pointer(pUserData);
// Fast path for an empty block: the request trivially covers the single free
// suballocation at offset 0, with nothing to make lost.
4932 #endif // #if VMA_STATS_STRING_ENABLED 4944 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4946 VMA_ASSERT(IsEmpty());
4947 pAllocationRequest->offset = 0;
4948 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4949 pAllocationRequest->sumItemSize = 0;
4950 pAllocationRequest->item = m_Suballocations.begin();
4951 pAllocationRequest->itemsToMakeLostCount = 0;
// Searches for a spot for a new allocation. Without canMakeOtherLost: binary
// search the size-sorted free list for the best fit (presumably controlled by
// a best-fit flag on the branch choosing forward vs. backward scan — the flag
// line was dropped by the extraction). With canMakeOtherLost: brute-force
// every suballocation, keeping the candidate with the lowest CalcCost().
4954 bool VmaBlockMetadata::CreateAllocationRequest(
4955 uint32_t currentFrameIndex,
4956 uint32_t frameInUseCount,
4957 VkDeviceSize bufferImageGranularity,
4958 VkDeviceSize allocSize,
4959 VkDeviceSize allocAlignment,
4960 VmaSuballocationType allocType,
4961 bool canMakeOtherLost,
4962 VmaAllocationRequest* pAllocationRequest)
4964 VMA_ASSERT(allocSize > 0);
4965 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4966 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4967 VMA_HEAVY_ASSERT(Validate());
// Early out: not enough total free space and nothing may be made lost.
4970 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4976 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4977 if(freeSuballocCount > 0)
// Best-fit: first free range whose size is >= the requested size.
4982 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4983 m_FreeSuballocationsBySize.data(),
4984 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4986 VmaSuballocationItemSizeLess());
4987 size_t index = it - m_FreeSuballocationsBySize.data();
4988 for(; index < freeSuballocCount; ++index)
4993 bufferImageGranularity,
4997 m_FreeSuballocationsBySize[index],
4999 &pAllocationRequest->offset,
5000 &pAllocationRequest->itemsToMakeLostCount,
5001 &pAllocationRequest->sumFreeSize,
5002 &pAllocationRequest->sumItemSize))
5004 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit: scan the free list from largest to smallest.
5012 for(
size_t index = freeSuballocCount; index--; )
5017 bufferImageGranularity,
5021 m_FreeSuballocationsBySize[index],
5023 &pAllocationRequest->offset,
5024 &pAllocationRequest->itemsToMakeLostCount,
5025 &pAllocationRequest->sumFreeSize,
5026 &pAllocationRequest->sumItemSize))
5028 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5035 if(canMakeOtherLost)
// Brute force over all suballocations, minimizing the cost of lost
// allocations; sentinel VK_WHOLE_SIZE marks "no candidate found yet".
5039 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5040 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5042 VmaAllocationRequest tmpAllocRequest = {};
5043 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5044 suballocIt != m_Suballocations.end();
5047 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5048 suballocIt->hAllocation->CanBecomeLost())
5053 bufferImageGranularity,
5059 &tmpAllocRequest.offset,
5060 &tmpAllocRequest.itemsToMakeLostCount,
5061 &tmpAllocRequest.sumFreeSize,
5062 &tmpAllocRequest.sumItemSize))
5064 tmpAllocRequest.item = suballocIt;
5066 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5068 *pAllocationRequest = tmpAllocRequest;
// Found at least one viable candidate iff the sentinel was overwritten.
5074 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts ("makes lost") the allocations counted in
// pAllocationRequest->itemsToMakeLostCount, starting at
// pAllocationRequest->item, freeing their suballocations so the request's
// destination becomes a single FREE suballocation. Returns false-path lines
// are not visible in this extraction — presumably returns false if an
// allocation refuses MakeLost(); TODO confirm.
5083 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5084 uint32_t currentFrameIndex,
5085 uint32_t frameInUseCount,
5086 VmaAllocationRequest* pAllocationRequest)
5088 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free suballocations; only occupied ones are made lost.
5090 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5092 ++pAllocationRequest->item;
5094 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5095 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5096 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5097 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the iterator to keep using.
5099 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5100 --pAllocationRequest->itemsToMakeLostCount;
5108 VMA_HEAVY_ASSERT(Validate());
5109 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5110 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Walks every suballocation in this block and makes lost each allocation
// that is allowed to become lost given the current frame index and
// frame-in-use count. Returns the number of allocations actually lost.
5115 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5117 uint32_t lostAllocationCount = 0;
5118 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5119 it != m_Suballocations.end();
5122 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5123 it->hAllocation->CanBecomeLost() &&
5124 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns a valid iterator after potential merging.
5126 it = FreeSuballocation(it);
5127 ++lostAllocationCount;
5130 return lostAllocationCount;
// Commits a previously validated allocation request: carves allocSize bytes
// out of the FREE suballocation at request.item, converting it into an
// occupied suballocation bound to hAllocation. Any leftover space before
// (paddingBegin) or after (paddingEnd) the allocation is re-inserted as new
// FREE suballocations and re-registered in the size-sorted list.
5133 void VmaBlockMetadata::Alloc(
5134 const VmaAllocationRequest& request,
5135 VmaSuballocationType type,
5136 VkDeviceSize allocSize,
5137 VmaAllocation hAllocation)
5139 VMA_ASSERT(request.item != m_Suballocations.end());
5140 VmaSuballocation& suballoc = *request.item;
5142 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5144 VMA_ASSERT(request.offset >= suballoc.offset);
// Alignment gap between the start of the free region and the chosen offset.
5145 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5146 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5147 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free list before being mutated into an occupied one.
5151 UnregisterFreeSuballocation(request.item);
5153 suballoc.offset = request.offset;
5154 suballoc.size = allocSize;
5155 suballoc.type = type;
5156 suballoc.hAllocation = hAllocation;
// Trailing free space becomes a new FREE suballocation after the item.
// NOTE(review): the "if(paddingEnd)" guard and the ++next increment are not
// visible in this extraction; confirm against upstream VMA source.
5161 VmaSuballocation paddingSuballoc = {};
5162 paddingSuballoc.offset = request.offset + allocSize;
5163 paddingSuballoc.size = paddingEnd;
5164 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5165 VmaSuballocationList::iterator next = request.item;
5167 const VmaSuballocationList::iterator paddingEndItem =
5168 m_Suballocations.insert(next, paddingSuballoc);
5169 RegisterFreeSuballocation(paddingEndItem);
// Leading free space becomes a new FREE suballocation before the item.
5175 VmaSuballocation paddingSuballoc = {};
5176 paddingSuballoc.offset = request.offset - paddingBegin;
5177 paddingSuballoc.size = paddingBegin;
5178 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5179 const VmaSuballocationList::iterator paddingBeginItem =
5180 m_Suballocations.insert(request.item, paddingSuballoc);
5181 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free region consumed; padding regions add back counts
// (the increments for paddingBegin/paddingEnd are not fully visible here).
5185 m_FreeCount = m_FreeCount - 1;
5186 if(paddingBegin > 0)
5194 m_SumFreeSize -= allocSize;
// Frees the suballocation bound to the given allocation handle by linear
// search over the suballocation list. Asserts if the handle is not found
// in this block.
5197 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5199 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5200 suballocItem != m_Suballocations.end();
5203 VmaSuballocation& suballoc = *suballocItem;
5204 if(suballoc.hAllocation == allocation)
5206 FreeSuballocation(suballocItem);
5207 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation does not belong to this block — a bug.
5211 VMA_ASSERT(0 &&
"Not found!");
// Debug check of m_FreeSuballocationsBySize invariants: every entry must be
// FREE, at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the
// vector must be sorted by ascending size.
5214 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5216 VkDeviceSize lastSize = 0;
5217 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5219 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5221 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5226 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Sizes must be non-decreasing for binary search to be valid.
5231 if(it->size < lastSize)
5237 lastSize = it->size;
// Core placement test: starting at suballocItem, determines whether an
// allocation of allocSize/allocAlignment/allocType can be placed there.
// Computes the final *pOffset (applying VMA_DEBUG_MARGIN, alignment, and
// bufferImageGranularity conflict avoidance per the Vulkan spec), and — in
// the canMakeOtherLost path — how many existing allocations would have to
// be made lost (*itemsToMakeLostCount) plus the free/occupied byte totals
// (*pSumFreeSize / *pSumItemSize) consumed by the request.
// Two largely parallel code paths follow: the first handles
// canMakeOtherLost == true (may span multiple suballocations), the second
// requires suballocItem itself to be FREE and large enough.
5242 bool VmaBlockMetadata::CheckAllocation(
5243 uint32_t currentFrameIndex,
5244 uint32_t frameInUseCount,
5245 VkDeviceSize bufferImageGranularity,
5246 VkDeviceSize allocSize,
5247 VkDeviceSize allocAlignment,
5248 VmaSuballocationType allocType,
5249 VmaSuballocationList::const_iterator suballocItem,
5250 bool canMakeOtherLost,
5251 VkDeviceSize* pOffset,
5252 size_t* itemsToMakeLostCount,
5253 VkDeviceSize* pSumFreeSize,
5254 VkDeviceSize* pSumItemSize)
const 5256 VMA_ASSERT(allocSize > 0);
5257 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5258 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5259 VMA_ASSERT(pOffset != VMA_NULL);
5261 *itemsToMakeLostCount = 0;
5265 if(canMakeOtherLost)
5267 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5269 *pSumFreeSize = suballocItem->size;
// Occupied start item: usable only if its allocation can be made lost and
// is old enough (last use outside the frame-in-use window).
5273 if(suballocItem->hAllocation->CanBecomeLost() &&
5274 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5276 ++*itemsToMakeLostCount;
5277 *pSumItemSize = suballocItem->size;
// Quick reject: not enough bytes left in the block from this offset on.
5286 if(m_Size - suballocItem->offset < allocSize)
5292 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
5295 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5297 *pOffset += VMA_DEBUG_MARGIN;
5301 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5302 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a previous suballocation of conflicting type
// (buffer vs. image) shares a "page" with our offset, bump alignment up to
// the granularity to satisfy the Vulkan aliasing rules.
5306 if(bufferImageGranularity > 1)
5308 bool bufferImageGranularityConflict =
false;
5309 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5310 while(prevSuballocItem != m_Suballocations.cbegin())
5313 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5314 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5316 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5318 bufferImageGranularityConflict =
true;
5326 if(bufferImageGranularityConflict)
5328 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
5334 if(*pOffset >= suballocItem->offset + suballocItem->size)
5340 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5343 VmaSuballocationList::const_iterator next = suballocItem;
// End margin only needed when something follows this allocation.
5345 const VkDeviceSize requiredEndMargin =
5346 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5348 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5350 if(suballocItem->offset + totalSize > m_Size)
// The request may need to consume several consecutive suballocations; walk
// forward accumulating free bytes and to-be-lost allocations until covered.
5357 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5358 if(totalSize > suballocItem->size)
5360 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5361 while(remainingSize > 0)
5364 if(lastSuballocItem == m_Suballocations.cend())
5368 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5370 *pSumFreeSize += lastSuballocItem->size;
5374 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5375 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5376 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5378 ++*itemsToMakeLostCount;
5379 *pSumItemSize += lastSuballocItem->size;
5386 remainingSize = (lastSuballocItem->size < remainingSize) ?
5387 remainingSize - lastSuballocItem->size : 0;
// Forward granularity check: a following suballocation of conflicting type
// on the same page would also have to become lost.
5393 if(bufferImageGranularity > 1)
5395 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5397 while(nextSuballocItem != m_Suballocations.cend())
5399 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5400 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5402 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5404 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5405 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5406 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5408 ++*itemsToMakeLostCount;
// ---- Second path: canMakeOtherLost == false. The item must itself be a
// FREE suballocation large enough for the aligned request.
5427 const VmaSuballocation& suballoc = *suballocItem;
5428 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5430 *pSumFreeSize = suballoc.size;
5433 if(suballoc.size < allocSize)
5439 *pOffset = suballoc.offset;
5442 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5444 *pOffset += VMA_DEBUG_MARGIN;
5448 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5449 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in the first path.
5453 if(bufferImageGranularity > 1)
5455 bool bufferImageGranularityConflict =
false;
5456 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5457 while(prevSuballocItem != m_Suballocations.cbegin())
5460 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5461 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5463 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5465 bufferImageGranularityConflict =
true;
5473 if(bufferImageGranularityConflict)
5475 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5480 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5483 VmaSuballocationList::const_iterator next = suballocItem;
5485 const VkDeviceSize requiredEndMargin =
5486 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin overflow this free region.
5489 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: here a conflict simply fails the check, since
// nothing may be made lost in this path.
5496 if(bufferImageGranularity > 1)
5498 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5500 while(nextSuballocItem != m_Suballocations.cend())
5502 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5503 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5505 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation at `item` with its (also FREE) successor:
// the successor's bytes are folded into `item` and the successor node is
// erased from the list. Caller must guarantee both are FREE.
// NOTE(review): the ++nextItem advance and m_FreeCount decrement are not
// visible in this extraction; confirm against upstream VMA source.
5524 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5526 VMA_ASSERT(item != m_Suballocations.end());
5527 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5529 VmaSuballocationList::iterator nextItem = item;
5531 VMA_ASSERT(nextItem != m_Suballocations.end());
5532 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5534 item->size += nextItem->size;
5536 m_Suballocations.erase(nextItem);
// Converts the suballocation at suballocItem to FREE, updates the free-byte
// total, coalesces it with adjacent FREE neighbors (next first, then prev),
// and registers the resulting merged region in the size-sorted free list.
// Returns an iterator to the surviving FREE suballocation.
5539 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5542 VmaSuballocation& suballoc = *suballocItem;
5543 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5544 suballoc.hAllocation = VK_NULL_HANDLE;
5548 m_SumFreeSize += suballoc.size;
5551 bool mergeWithNext =
false;
5552 bool mergeWithPrev =
false;
// Check successor (the ++nextItem advance is not visible in this extraction).
5554 VmaSuballocationList::iterator nextItem = suballocItem;
5556 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5558 mergeWithNext =
true;
// Check predecessor, only when not already at the front of the list.
5561 VmaSuballocationList::iterator prevItem = suballocItem;
5562 if(suballocItem != m_Suballocations.begin())
5565 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5567 mergeWithPrev =
true;
// Neighbors leave the size-sorted list before merging changes their size.
5573 UnregisterFreeSuballocation(nextItem);
5574 MergeFreeWithNext(suballocItem);
5579 UnregisterFreeSuballocation(prevItem);
5580 MergeFreeWithNext(prevItem);
5581 RegisterFreeSuballocation(prevItem);
5586 RegisterFreeSuballocation(suballocItem);
5587 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Regions smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
5591 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5593 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5594 VMA_ASSERT(item->size > 0);
5598 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5600 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5602 if(m_FreeSuballocationsBySize.empty())
5604 m_FreeSuballocationsBySize.push_back(item);
// Sorted insertion preserves the ordering needed by binary search.
5608 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from m_FreeSuballocationsBySize. Binary-search
// to the first entry of equal size, then scan forward over equal-size entries
// until the exact iterator is found. Asserts if it is missing.
5616 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5618 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5619 VMA_ASSERT(item->size > 0);
5623 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Small regions were never registered (see RegisterFreeSuballocation).
5625 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5627 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5628 m_FreeSuballocationsBySize.data(),
5629 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5631 VmaSuballocationItemSizeLess());
5632 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5633 index < m_FreeSuballocationsBySize.size();
5636 if(m_FreeSuballocationsBySize[index] == item)
5638 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walking past the run of equal sizes without a match means corruption.
5641 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5643 VMA_ASSERT(0 &&
"Not found.");
// Constructor: starts with no mapped pointer. (The m_MapCount(0) initializer
// is not visible in this extraction but is implied by the destructor assert.)
5652 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5654 m_pMappedData(VMA_NULL)
// Destructor: a nonzero map count here means someone leaked a mapping.
5658 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5660 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory block, guarded by m_Mutex when
// the allocator uses mutexes. If already mapped, returns the cached pointer;
// otherwise calls vkMapMemory through the allocator's function table.
// ppData may be null when the caller does not need the pointer.
5663 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData)
5665 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already-mapped path (the m_MapCount check/increment is not visible in
// this extraction): reuse the existing mapping.
5669 VMA_ASSERT(m_pMappedData != VMA_NULL);
5670 if(ppData != VMA_NULL)
5672 *ppData = m_pMappedData;
// First map: whole-range vkMapMemory (offset/size args not visible here).
5678 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5679 hAllocator->m_hDevice,
5685 if(result == VK_SUCCESS)
5687 if(ppData != VMA_NULL)
5689 *ppData = m_pMappedData;
// Reference-counted unmap: only when the count drops to zero is
// vkUnmapMemory actually called. Unbalanced calls trigger the assert.
5697 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5699 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5702 if(--m_MapCount == 0)
5704 m_pMappedData = VMA_NULL;
5705 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5710 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructor: block starts uninitialized (no memory type, null handle);
// real setup happens in Init().
5717 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5718 m_MemoryTypeIndex(UINT32_MAX),
5719 m_hMemory(VK_NULL_HANDLE),
5720 m_Metadata(hAllocator)
// Binds this block to an already-allocated VkDeviceMemory of newSize bytes
// and initializes its suballocation metadata. Must only be called once
// (asserted by the null-handle check).
5724 void VmaDeviceMemoryBlock::Init(
5725 uint32_t newMemoryTypeIndex,
5726 VkDeviceMemory newMemory,
5727 VkDeviceSize newSize)
5729 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5731 m_MemoryTypeIndex = newMemoryTypeIndex;
5732 m_hMemory = newMemory;
5734 m_Metadata.Init(newSize);
// Releases the underlying VkDeviceMemory back through the allocator.
// The block must be empty — destroying it with live allocations is a
// client bug caught by the assert.
5737 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5741 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5743 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5744 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5745 m_hMemory = VK_NULL_HANDLE;
// Sanity check: the block must have a live memory handle and nonzero size;
// beyond that, validation is delegated to the metadata.
5748 bool VmaDeviceMemoryBlock::Validate()
const 5750 if((m_hMemory == VK_NULL_HANDLE) ||
5751 (m_Metadata.GetSize() == 0))
5756 return m_Metadata.Validate();
// Thin forwards to the shared, ref-counted mapping object.
5759 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator,
void** ppData)
5761 return m_Mapping.Map(hAllocator, m_hMemory, ppData);
5764 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5766 m_Mapping.Unmap(hAllocator, m_hMemory);
// NOTE(review): this region is heavily truncated in the extraction. The
// memset below belongs to a stat-info initialization helper whose header is
// not visible; VmaPostprocessCalcStatInfo's body is missing; the VmaPool_T
// constructor initializer list (forwarding VmaPoolCreateInfo fields to its
// internal m_BlockVector) and destructor body are only partially visible.
// Confirm details against the upstream VMA source before editing.
5771 memset(&outInfo, 0,
sizeof(outInfo));
5790 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T: a custom pool is essentially a wrapper around a VmaBlockVector
// configured from the user-provided VmaPoolCreateInfo.
5798 VmaPool_T::VmaPool_T(
5799 VmaAllocator hAllocator,
5803 createInfo.memoryTypeIndex,
5804 createInfo.blockSize,
5805 createInfo.minBlockCount,
5806 createInfo.maxBlockCount,
5808 createInfo.frameInUseCount,
5813 VmaPool_T::~VmaPool_T()
// VmaBlockVector: owns a growable sequence of VmaDeviceMemoryBlock of one
// memory type. The constructor only stores configuration; blocks are
// created lazily (CreateMinBlocks / Allocate).
5817 #if VMA_STATS_STRING_ENABLED 5819 #endif // #if VMA_STATS_STRING_ENABLED 5821 VmaBlockVector::VmaBlockVector(
5822 VmaAllocator hAllocator,
5823 uint32_t memoryTypeIndex,
5824 VkDeviceSize preferredBlockSize,
5825 size_t minBlockCount,
5826 size_t maxBlockCount,
5827 VkDeviceSize bufferImageGranularity,
5828 uint32_t frameInUseCount,
5829 bool isCustomPool) :
5830 m_hAllocator(hAllocator),
5831 m_MemoryTypeIndex(memoryTypeIndex),
5832 m_PreferredBlockSize(preferredBlockSize),
5833 m_MinBlockCount(minBlockCount),
5834 m_MaxBlockCount(maxBlockCount),
5835 m_BufferImageGranularity(bufferImageGranularity),
5836 m_FrameInUseCount(frameInUseCount),
5837 m_IsCustomPool(isCustomPool),
// Block vector uses the allocator's callbacks for its own bookkeeping.
5838 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5839 m_HasEmptyBlock(false),
5840 m_pDefragmentator(VMA_NULL)
// Destructor: the defragmentator must have been destroyed first; each block
// releases its device memory and is deleted. Iterates backwards so removal
// order matches vector storage.
5844 VmaBlockVector::~VmaBlockVector()
5846 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5848 for(
size_t i = m_Blocks.size(); i--; )
5850 m_Blocks[i]->Destroy(m_hAllocator);
5851 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size; stops and
// propagates the first failure.
5855 VkResult VmaBlockVector::CreateMinBlocks()
5857 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5859 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5860 if(res != VK_SUCCESS)
// Accumulates pool statistics over every block, under the vector's mutex
// (when the allocator uses mutexes).
5868 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5876 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5878 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5880 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5882 VMA_HEAVY_ASSERT(pBlock->Validate());
5883 pBlock->m_Metadata.AddPoolStats(*pStats);
// Bound on retries in the "make other allocations lost" loop below.
5887 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three stages:
//  1. Try every existing block without evicting anything.
//  2. If allowed and under m_MaxBlockCount, create a new block — for default
//     pools the size is adaptively halved (up to NEW_BLOCK_SIZE_SHIFT_MAX
//     times) based on existing block sizes and the request, and halved again
//     on allocation failure.
//  3. If canMakeOtherLost, repeatedly pick the cheapest eviction-based
//     request across all blocks and try to commit it, up to
//     VMA_ALLOCATION_TRY_COUNT rounds.
// Returns VK_ERROR_TOO_MANY_OBJECTS / VK_ERROR_OUT_OF_DEVICE_MEMORY on
// failure. Several guard lines (mapping flags, canMakeOtherLost derivation)
// are missing from this extraction.
5889 VkResult VmaBlockVector::Allocate(
5890 VmaPool hCurrentPool,
5891 uint32_t currentFrameIndex,
5892 const VkMemoryRequirements& vkMemReq,
5894 VmaSuballocationType suballocType,
5895 VmaAllocation* pAllocation)
5900 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Stage 1: search existing blocks, no eviction permitted.
5904 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5906 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5907 VMA_ASSERT(pCurrBlock);
5908 VmaAllocationRequest currRequest = {};
5909 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5912 m_BufferImageGranularity,
5920 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations map the block up-front (guarded by a
// mapping-flag check not visible in this extraction).
5924 VkResult res = pCurrBlock->Map(m_hAllocator,
nullptr);
5925 if(res != VK_SUCCESS)
5932 if(pCurrBlock->m_Metadata.IsEmpty())
// This block is about to stop being empty.
5934 m_HasEmptyBlock =
false;
5937 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5938 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5939 (*pAllocation)->InitBlockAllocation(
5948 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5949 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5950 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Stage 2: optionally create a brand-new block.
5955 const bool canCreateNewBlock =
5957 (m_Blocks.size() < m_MaxBlockCount);
5960 if(canCreateNewBlock)
5963 VkDeviceSize newBlockSize = m_PreferredBlockSize;
5964 uint32_t newBlockSizeShift = 0;
5965 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default (non-custom) pools start small: shrink the candidate block size
// while it still exceeds every existing block and twice the request.
5969 if(m_IsCustomPool ==
false)
5972 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
5973 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
5975 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
5976 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
5978 newBlockSize = smallerNewBlockSize;
5979 ++newBlockSizeShift;
5988 size_t newBlockIndex = 0;
5989 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved sizes as long
// as the request still fits.
5991 if(m_IsCustomPool ==
false)
5993 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
5995 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
5996 if(smallerNewBlockSize >= vkMemReq.size)
5998 newBlockSize = smallerNewBlockSize;
5999 ++newBlockSizeShift;
6000 res = CreateBlock(newBlockSize, &newBlockIndex);
6009 if(res == VK_SUCCESS)
6011 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6012 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6016 res = pBlock->Map(m_hAllocator,
nullptr);
6017 if(res != VK_SUCCESS)
// Fresh block: the allocation trivially goes at the start.
6024 VmaAllocationRequest allocRequest;
6025 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6026 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6027 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6028 (*pAllocation)->InitBlockAllocation(
6031 allocRequest.offset,
6037 VMA_HEAVY_ASSERT(pBlock->Validate());
6038 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6039 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Stage 3: evict losable allocations to make room.
6047 if(canMakeOtherLost)
6049 uint32_t tryIndex = 0;
6050 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6052 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6053 VmaAllocationRequest bestRequest = {};
6054 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find the cheapest eviction-based request across all blocks.
6058 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6060 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6061 VMA_ASSERT(pCurrBlock);
6062 VmaAllocationRequest currRequest = {};
6063 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6066 m_BufferImageGranularity,
6073 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6074 if(pBestRequestBlock == VMA_NULL ||
6075 currRequestCost < bestRequestCost)
6077 pBestRequestBlock = pCurrBlock;
6078 bestRequest = currRequest;
6079 bestRequestCost = currRequestCost;
// Zero cost cannot be beaten — stop searching early.
6081 if(bestRequestCost == 0)
6089 if(pBestRequestBlock != VMA_NULL)
6093 VkResult res = pBestRequestBlock->Map(m_hAllocator,
nullptr);
6094 if(res != VK_SUCCESS)
// Eviction can race with frame advancement, so committing may fail and the
// outer loop retries with a fresh search.
6100 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6106 if(pBestRequestBlock->m_Metadata.IsEmpty())
6108 m_HasEmptyBlock =
false;
6111 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6112 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6113 (*pAllocation)->InitBlockAllocation(
6122 VMA_HEAVY_ASSERT(pBlock->Validate());
6123 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6124 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Exhausting all retries reports "too many objects" per VMA convention.
6138 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6140 return VK_ERROR_TOO_MANY_OBJECTS;
6144 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation from its owning block. Policy: keep at most one
// empty block alive (m_HasEmptyBlock) as a cache; a second empty block —
// or the last block when another empty one already exists — is destroyed.
// Destruction happens outside the mutex scope via pBlockToDelete.
6147 void VmaBlockVector::Free(
6148 VmaAllocation hAllocation)
6150 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: everything that touches m_Blocks / metadata.
6154 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6156 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently-mapped allocations drop their map reference on free.
6158 if(hAllocation->IsPersistentMap())
6160 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
6163 pBlock->m_Metadata.Free(hAllocation);
6164 VMA_HEAVY_ASSERT(pBlock->Validate());
6166 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6169 if(pBlock->m_Metadata.IsEmpty())
// Already caching one empty block: this newly empty one can go, provided
// we stay at or above the configured minimum block count.
6172 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6174 pBlockToDelete = pBlock;
6180 m_HasEmptyBlock =
true;
// Block did not become empty, but an empty one may now be the last after
// sorting — release it if the minimum allows.
6185 else if(m_HasEmptyBlock)
6187 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6188 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6190 pBlockToDelete = pLastBlock;
6191 m_Blocks.pop_back();
6192 m_HasEmptyBlock =
false;
6196 IncrementallySortBlocks();
// Actual VkDeviceMemory release is deferred to outside the lock.
6201 if(pBlockToDelete != VMA_NULL)
6203 VMA_DEBUG_LOG(
" Deleted empty allocation");
6204 pBlockToDelete->Destroy(m_hAllocator);
6205 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning from the back
// and stopping early once a block reaches the preferred size.
6209 size_t VmaBlockVector::CalcMaxBlockSize()
const 6212 for(
size_t i = m_Blocks.size(); i--; )
6214 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
6215 if(result >= m_PreferredBlockSize)
// Removes a specific block pointer from m_Blocks (used by defragmentation).
6223 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6225 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6227 if(m_Blocks[blockIndex] == pBlock)
6229 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free space, so Allocate
// tries the fullest blocks first; amortized over many calls.
6236 void VmaBlockVector::IncrementallySortBlocks()
6239 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6241 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6243 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates blockSize bytes of VkDeviceMemory for this vector's memory type,
// wraps it in a new VmaDeviceMemoryBlock appended to m_Blocks, and reports
// its index via pNewBlockIndex (optional). Error-return line for a failed
// AllocateVulkanMemory is not visible in this extraction.
6249 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6251 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6252 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6253 allocInfo.allocationSize = blockSize;
6254 VkDeviceMemory mem = VK_NULL_HANDLE;
6255 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6264 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6268 allocInfo.allocationSize);
6270 m_Blocks.push_back(pBlock);
6271 if(pNewBlockIndex != VMA_NULL)
6273 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON (stats-string feature). Custom pools
// and default pools emit slightly different key sets — the branch selecting
// between them is not visible in this extraction.
6279 #if VMA_STATS_STRING_ENABLED 6281 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6283 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6289 json.WriteString(
"MemoryTypeIndex");
6290 json.WriteNumber(m_MemoryTypeIndex);
6292 json.WriteString(
"BlockSize");
6293 json.WriteNumber(m_PreferredBlockSize);
6295 json.WriteString(
"BlockCount");
6296 json.BeginObject(
true);
// Min/Max are only emitted when they constrain the pool.
6297 if(m_MinBlockCount > 0)
6299 json.WriteString(
"Min");
6300 json.WriteNumber(m_MinBlockCount);
6302 if(m_MaxBlockCount < SIZE_MAX)
6304 json.WriteString(
"Max");
6305 json.WriteNumber(m_MaxBlockCount);
6307 json.WriteString(
"Cur");
6308 json.WriteNumber(m_Blocks.size());
6311 if(m_FrameInUseCount > 0)
6313 json.WriteString(
"FrameInUseCount");
6314 json.WriteNumber(m_FrameInUseCount);
6319 json.WriteString(
"PreferredBlockSize");
6320 json.WriteNumber(m_PreferredBlockSize);
// Per-block detail: each block's metadata dumps its suballocation map.
6323 json.WriteString(
"Blocks");
6325 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6327 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (once) and returns the defragmentator for this block
// vector, stamped with the current frame index.
6334 #endif // #if VMA_STATS_STRING_ENABLED 6336 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6337 VmaAllocator hAllocator,
6338 uint32_t currentFrameIndex)
6340 if(m_pDefragmentator == VMA_NULL)
6342 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6348 return m_pDefragmentator;
// Runs defragmentation (must have been set up via EnsureDefragmentator),
// accumulates moved-bytes/moved-allocation stats, then destroys blocks that
// became empty — keeping m_MinBlockCount blocks and flagging one surviving
// empty block in m_HasEmptyBlock.
6351 VkResult VmaBlockVector::Defragment(
6353 VkDeviceSize& maxBytesToMove,
6354 uint32_t& maxAllocationsToMove)
6356 if(m_pDefragmentator == VMA_NULL)
6361 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6364 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report how much was actually moved, and shrink the caller's remaining
// budgets (the subtraction lines are not visible in this extraction).
6367 if(pDefragmentationStats != VMA_NULL)
6369 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6370 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6373 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6374 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Re-derive m_HasEmptyBlock while reaping empty blocks back-to-front
// (backwards iteration keeps indices valid across VmaVectorRemove).
6380 m_HasEmptyBlock =
false;
6381 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6383 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6384 if(pBlock->m_Metadata.IsEmpty())
6386 if(m_Blocks.size() > m_MinBlockCount)
6388 if(pDefragmentationStats != VMA_NULL)
6391 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6394 VmaVectorRemove(m_Blocks, blockIndex);
6395 pBlock->Destroy(m_hAllocator);
6396 vma_delete(m_hAllocator, pBlock);
6400 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator, if any.
6408 void VmaBlockVector::DestroyDefragmentator()
6410 if(m_pDefragmentator != VMA_NULL)
6412 vma_delete(m_hAllocator, m_pDefragmentator);
6413 m_pDefragmentator = VMA_NULL;
// Makes every losable allocation in every block lost for the given frame,
// reporting the total count via the optional out-parameter.
6417 void VmaBlockVector::MakePoolAllocationsLost(
6418 uint32_t currentFrameIndex,
6419 size_t* pLostAllocationCount)
6421 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6422 size_t lostAllocationCount = 0;
6423 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6425 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6427 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6429 if(pLostAllocationCount != VMA_NULL)
6431 *pLostAllocationCount = lostAllocationCount;
// Folds per-block statistics into the global VmaStats: each block's stat
// info is added to the grand total, its memory type's bucket, and its
// memory heap's bucket.
6435 void VmaBlockVector::AddStats(
VmaStats* pStats)
6437 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6438 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6440 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6442 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6444 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6446 VMA_HEAVY_ASSERT(pBlock->Validate());
6448 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6449 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6450 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6451 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6458 VmaDefragmentator::VmaDefragmentator(
6459 VmaAllocator hAllocator,
6460 VmaBlockVector* pBlockVector,
6461 uint32_t currentFrameIndex) :
6462 m_hAllocator(hAllocator),
6463 m_pBlockVector(pBlockVector),
6464 m_CurrentFrameIndex(currentFrameIndex),
6466 m_AllocationsMoved(0),
6467 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6468 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6472 VmaDefragmentator::~VmaDefragmentator()
6474 for(
size_t i = m_Blocks.size(); i--; )
6476 vma_delete(m_hAllocator, m_Blocks[i]);
6480 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6482 AllocationInfo allocInfo;
6483 allocInfo.m_hAllocation = hAlloc;
6484 allocInfo.m_pChanged = pChanged;
6485 m_Allocations.push_back(allocInfo);
6488 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6491 if(m_pMappedDataForDefragmentation)
6493 *ppMappedData = m_pMappedDataForDefragmentation;
6498 if(m_pBlock->m_Mapping.GetMappedData())
6500 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6505 VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6506 *ppMappedData = m_pMappedDataForDefragmentation;
6510 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6512 if(m_pMappedDataForDefragmentation != VMA_NULL)
6514 m_pBlock->Unmap(hAllocator);
// Performs one defragmentation pass: scans allocations from the last block
// backwards and tries to move each to an earlier block / earlier offset
// (see MoveMakesSense). Returns VK_INCOMPLETE once maxBytesToMove or
// maxAllocationsToMove would be exceeded.
// NOTE(review): this text is gap-damaged (stray embedded line numbers,
// missing braces/statements such as the outer while-loop and goto labels) -
// restore from a pristine copy before compiling.
6518 VkResult VmaDefragmentator::DefragmentRound(
6519 VkDeviceSize maxBytesToMove,
6520 uint32_t maxAllocationsToMove)
// Nothing to do when no blocks were registered.
6522 if(m_Blocks.empty())
// Source cursor starts at the last block; SIZE_MAX forces re-seeding below.
6527 size_t srcBlockIndex = m_Blocks.size() - 1;
6528 size_t srcAllocIndex = SIZE_MAX;
// Seek backwards to a block that still has movable allocations.
6534 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6536 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6539 if(srcBlockIndex == 0)
6546 srcAllocIndex = SIZE_MAX;
6551 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6555 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6556 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Cached properties of the allocation to be moved.
6558 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6559 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6560 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6561 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
6564 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6566 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6567 VmaAllocationRequest dstAllocRequest;
6568 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6569 m_CurrentFrameIndex,
6570 m_pBlockVector->GetFrameInUseCount(),
6571 m_pBlockVector->GetBufferImageGranularity(),
6576 &dstAllocRequest) &&
6578 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation never sacrifices other allocations to make room.
6580 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller-imposed move budget.
6583 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6584 (m_BytesMoved + size > maxBytesToMove))
6586 return VK_INCOMPLETE;
// Map both blocks so the payload can be copied on the CPU.
6589 void* pDstMappedData = VMA_NULL;
6590 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6591 if(res != VK_SUCCESS)
6596 void* pSrcMappedData = VMA_NULL;
6597 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6598 if(res != VK_SUCCESS)
// Copy the allocation's bytes to the destination offset.
6605 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6606 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6607 static_cast<size_t>(size));
// Commit the move in metadata and retarget the allocation handle.
6609 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6610 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6612 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6614 if(allocInfo.m_pChanged != VMA_NULL)
6616 *allocInfo.m_pChanged = VK_TRUE;
6619 ++m_AllocationsMoved;
6620 m_BytesMoved += size;
6622 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the backwards-walking source cursor.
6630 if(srcAllocIndex > 0)
6636 if(srcBlockIndex > 0)
6639 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info, distributes the
// registered allocations into their blocks, sorts blocks into destination
// order, then runs up to two rounds of DefragmentRound and unmaps everything.
// NOTE(review): gap-damaged text (embedded line numbers, missing braces) -
// restore from a pristine copy before compiling.
6649 VkResult VmaDefragmentator::Defragment(
6650 VkDeviceSize maxBytesToMove,
6651 uint32_t maxAllocationsToMove)
6653 if(m_Allocations.empty())
// Create a BlockInfo wrapper for every block in the vector.
6659 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6660 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6662 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6663 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6664 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be routed via binary search.
6668 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6671 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6673 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped - there is nothing to move.
6675 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6677 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6678 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6679 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6681 (*it)->m_Allocations.push_back(allocInfo);
6689 m_Allocations.clear();
// Precompute movability and order allocations largest-first per block.
6691 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6693 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6694 pBlockInfo->CalcHasNonMovableAllocations();
6695 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks by desirability as a move destination.
6699 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stop early on VK_INCOMPLETE or error.
6702 VkResult result = VK_SUCCESS;
6703 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6705 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings created during the rounds.
6709 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6711 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6717 bool VmaDefragmentator::MoveMakesSense(
6718 size_t dstBlockIndex, VkDeviceSize dstOffset,
6719 size_t srcBlockIndex, VkDeviceSize srcOffset)
6721 if(dstBlockIndex < srcBlockIndex)
6725 if(dstBlockIndex > srcBlockIndex)
6729 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (fragment): initializes members from
// VmaAllocatorCreateInfo, queries physical-device properties, applies
// optional per-heap size limits, and creates one default block vector and
// one dedicated-allocation list per memory type.
// NOTE(review): the signature line and several initializers/statements are
// missing from this gap-damaged text - restore before compiling.
6742 m_hDevice(pCreateInfo->device),
6743 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty callbacks when the user supplied none.
6744 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6745 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6746 m_PreferredLargeHeapBlockSize(0),
6747 m_PhysicalDevice(pCreateInfo->physicalDevice),
6748 m_CurrentFrameIndex(0),
6749 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero out POD members before they are filled below.
6753 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6754 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6755 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6757 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6758 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no user-imposed limit" for a heap.
6760 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6762 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6773 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6774 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply pHeapSizeLimit: clamp the reported heap sizes so the rest of the
// allocator respects the budget.
6781 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6783 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6784 if(limit != VK_WHOLE_SIZE)
6786 m_HeapSizeLimit[heapIndex] = limit;
6787 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6789 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector plus one dedicated-allocation vector per type.
6795 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6797 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6799 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6805 GetBufferImageGranularity(),
6810 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6814 VmaAllocator_T::~VmaAllocator_T()
6816 VMA_ASSERT(m_Pools.empty());
6818 for(
size_t i = GetMemoryTypeCount(); i--; )
6820 vma_delete(
this, m_pDedicatedAllocations[i]);
6821 vma_delete(
this, m_pBlockVectors[i]);
// Resolves the table of Vulkan entry points used by the allocator:
// 1) when statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//    global function addresses; 2) overlay any non-null pointers supplied by
//    the user; 3) assert that everything required is resolved.
// NOTE(review): preprocessor directives are fused onto code lines in this
// gap-damaged text - restore line breaks before compiling.
6825 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6827 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6828 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6829 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6830 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6831 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6832 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6833 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6834 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6835 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6836 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6837 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6838 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6839 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6840 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6841 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points must be fetched dynamically even in static mode.
6842 if(m_UseKhrDedicatedAllocation)
6844 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
6845 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
6846 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
6847 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers override the defaults, but only where non-null.
6849 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6851 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6852 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6854 if(pVulkanFunctions != VMA_NULL)
6856 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6857 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6858 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6859 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6860 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6861 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6862 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6863 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6864 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6865 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6866 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6867 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6868 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6869 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6870 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6871 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final sanity check: every required entry point must now be non-null.
6874 #undef VMA_COPY_IF_NOT_NULL 6878 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6879 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6880 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6881 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6882 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6883 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6884 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6885 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6886 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6887 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6888 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6889 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6890 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6891 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// Extension pointers are mandatory only when the extension is in use.
6892 if(m_UseKhrDedicatedAllocation)
6894 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6895 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6899 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6901 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6902 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6903 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
6904 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: first tries (or is forced
// to use) a dedicated allocation when preferred, otherwise sub-allocates
// from the type's block vector, falling back to dedicated memory on failure.
// NOTE(review): gap-damaged text - finalCreateInfo setup, flag tests and
// several argument lists are missing; restore before compiling.
6907 VkResult VmaAllocator_T::AllocateMemoryOfType(
6908 const VkMemoryRequirements& vkMemReq,
6909 bool dedicatedAllocation,
6910 VkBuffer dedicatedBuffer,
6911 VkImage dedicatedImage,
6913 uint32_t memTypeIndex,
6914 VmaSuballocationType suballocType,
6915 VmaAllocation* pAllocation)
6917 VMA_ASSERT(pAllocation != VMA_NULL);
6918 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping only makes sense for HOST_VISIBLE memory types.
6924 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6929 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
6930 VMA_ASSERT(blockVector);
// Heuristic: requests larger than half a block go straight to dedicated memory.
6932 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6933 bool preferDedicatedMemory =
6934 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6935 dedicatedAllocation ||
6937 vkMemReq.size > preferredBlockSize / 2;
6939 if(preferDedicatedMemory &&
6941 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new device memory here.
6950 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6954 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector.
6968 VkResult res = blockVector->Allocate(
6970 m_CurrentFrameIndex.load(),
6975 if(res == VK_SUCCESS)
// Block allocation failed and new blocks are forbidden.
6983 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation.
6987 res = AllocateDedicatedMemory(
6993 finalCreateInfo.pUserData,
6997 if(res == VK_SUCCESS)
7000 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7006 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates one standalone VkDeviceMemory for a single allocation, optionally
// chaining VkMemoryDedicatedAllocateInfoKHR when the VK_KHR_dedicated_allocation
// extension is in use, optionally mapping it, and registers the result in the
// per-type dedicated-allocation list.
// NOTE(review): gap-damaged text - the "map if requested" condition and some
// cleanup/return lines are missing; restore before compiling.
7013 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7015 VmaSuballocationType suballocType,
7016 uint32_t memTypeIndex,
7018 bool isUserDataString,
7020 VkBuffer dedicatedBuffer,
7021 VkImage dedicatedImage,
7022 VmaAllocation* pAllocation)
7024 VMA_ASSERT(pAllocation);
7026 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7027 allocInfo.memoryTypeIndex = memTypeIndex;
7028 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info for exactly one of buffer/image.
7030 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7031 if(m_UseKhrDedicatedAllocation)
7033 if(dedicatedBuffer != VK_NULL_HANDLE)
7035 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7036 dedicatedAllocInfo.buffer = dedicatedBuffer;
7037 allocInfo.pNext = &dedicatedAllocInfo;
7039 else if(dedicatedImage != VK_NULL_HANDLE)
7041 dedicatedAllocInfo.image = dedicatedImage;
7042 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate via the budget-aware wrapper.
7047 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7048 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7051 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping, if requested; free the memory again on map failure.
7055 void* pMappedData =
nullptr;
7058 res = (*m_VulkanFunctions.vkMapMemory)(
7067 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7068 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and attach user data.
7073 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7074 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7075 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted per-type list (guarded by its mutex).
7079 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7080 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7081 VMA_ASSERT(pDedicatedAllocations);
7082 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7085 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7090 void VmaAllocator_T::GetBufferMemoryRequirements(
7092 VkMemoryRequirements& memReq,
7093 bool& requiresDedicatedAllocation,
7094 bool& prefersDedicatedAllocation)
const 7096 if(m_UseKhrDedicatedAllocation)
7098 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7099 memReqInfo.buffer = hBuffer;
7101 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7103 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7104 memReq2.pNext = &memDedicatedReq;
7106 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7108 memReq = memReq2.memoryRequirements;
7109 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7110 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7114 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7115 requiresDedicatedAllocation =
false;
7116 prefersDedicatedAllocation =
false;
7120 void VmaAllocator_T::GetImageMemoryRequirements(
7122 VkMemoryRequirements& memReq,
7123 bool& requiresDedicatedAllocation,
7124 bool& prefersDedicatedAllocation)
const 7126 if(m_UseKhrDedicatedAllocation)
7128 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7129 memReqInfo.image = hImage;
7131 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7133 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7134 memReq2.pNext = &memDedicatedReq;
7136 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7138 memReq = memReq2.memoryRequirements;
7139 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7140 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7144 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7145 requiresDedicatedAllocation =
false;
7146 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates flag combinations, routes
// pool allocations to the pool's block vector, otherwise iterates candidate
// memory types (best first), retrying with the next type on failure.
// NOTE(review): gap-damaged text - the createInfo parameter, flag checks and
// several argument lists are missing; restore before compiling.
7150 VkResult VmaAllocator_T::AllocateMemory(
7151 const VkMemoryRequirements& vkMemReq,
7152 bool requiresDedicatedAllocation,
7153 bool prefersDedicatedAllocation,
7154 VkBuffer dedicatedBuffer,
7155 VkImage dedicatedImage,
7157 VmaSuballocationType suballocType,
7158 VmaAllocation* pAllocation)
// Reject mutually-contradictory flag combinations up front.
7163 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7164 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7169 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7170 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation excludes NEVER_ALLOCATE and pools.
7172 if(requiresDedicatedAllocation)
7176 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7177 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7179 if(createInfo.
pool != VK_NULL_HANDLE)
7181 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7182 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7185 if((createInfo.
pool != VK_NULL_HANDLE) &&
7188 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7189 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass memory-type selection entirely.
7192 if(createInfo.
pool != VK_NULL_HANDLE)
7194 return createInfo.
pool->m_BlockVector.Allocate(
7196 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, then retry with the next-best
// candidates (clearing each failed type's bit) until success or exhaustion.
7205 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7206 uint32_t memTypeIndex = UINT32_MAX;
7208 if(res == VK_SUCCESS)
7210 res = AllocateMemoryOfType(
7212 requiresDedicatedAllocation || prefersDedicatedAllocation,
7220 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set.
7230 memoryTypeBits &= ~(1u << memTypeIndex);
7233 if(res == VK_SUCCESS)
7235 res = AllocateMemoryOfType(
7237 requiresDedicatedAllocation || prefersDedicatedAllocation,
7245 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
7255 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7266 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7268 VMA_ASSERT(allocation);
7270 if(allocation->CanBecomeLost() ==
false ||
7271 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7273 switch(allocation->GetType())
7275 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7277 VmaBlockVector* pBlockVector = VMA_NULL;
7278 VmaPool hPool = allocation->GetPool();
7279 if(hPool != VK_NULL_HANDLE)
7281 pBlockVector = &hPool->m_BlockVector;
7285 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7286 pBlockVector = m_pBlockVectors[memTypeIndex];
7288 pBlockVector->Free(allocation);
7291 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7292 FreeDedicatedMemory(allocation);
7299 allocation->SetUserData(
this, VMA_NULL);
7300 vma_delete(
this, allocation);
// Fills pStats with aggregate statistics from all default block vectors,
// all custom pools, and all dedicated allocations, then post-processes the
// totals (averages etc.).
// NOTE(review): gap-damaged text - the InitStatInfo loop bodies and a local
// VmaStatInfo declaration are missing; restore before compiling.
7303 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset all output buckets before accumulating.
7306 InitStatInfo(pStats->
total);
7307 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7309 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7313 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7315 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7316 VMA_ASSERT(pBlockVector);
7317 pBlockVector->AddStats(pStats);
// Custom pools (registry guarded by m_PoolsMutex).
7322 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7323 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7325 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, each list under its own mutex.
7330 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7332 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7333 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7334 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7335 VMA_ASSERT(pDedicatedAllocVector);
7336 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7339 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7340 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7341 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7342 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and other secondary numbers from the raw sums.
7347 VmaPostprocessCalcStatInfo(pStats->
total);
7348 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7349 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7350 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7351 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID of AMD.
// NOTE(review): no use of this constant is visible in this chunk - presumably
// used for vendor-specific behavior elsewhere; confirm at the use site.
7354 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Public defragmentation entry: distributes the given allocations to their
// block vectors' defragmentators, runs defragmentation on every HOST_VISIBLE
// default vector and every pool, then tears the defragmentators down.
// NOTE(review): gap-damaged text - the pDefragmentationInfo/pStats parameter
// lines, several conditions and the final return are missing; restore before
// compiling. Also note the memsets shown only clear ONE element
// (sizeof(*ptr)), not allocationCount elements - verify against upstream.
7356 VkResult VmaAllocator_T::Defragment(
7357 VmaAllocation* pAllocations,
7358 size_t allocationCount,
7359 VkBool32* pAllocationsChanged,
// Zero the optional output arrays/struct up front.
7363 if(pAllocationsChanged != VMA_NULL)
7365 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7367 if(pDefragmentationStats != VMA_NULL)
7369 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7372 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7374 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7376 const size_t poolCount = m_Pools.size();
// Route each eligible allocation to its owning block vector's defragmentator.
7379 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7381 VmaAllocation hAlloc = pAllocations[allocIndex];
7383 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block suballocations in HOST_VISIBLE memory that are not lost
// can be moved by the CPU copy in DefragmentRound.
7385 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7387 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7389 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7391 VmaBlockVector* pAllocBlockVector =
nullptr;
7393 const VmaPool hAllocPool = hAlloc->GetPool();
7395 if(hAllocPool != VK_NULL_HANDLE)
7397 pAllocBlockVector = &hAllocPool->GetBlockVector();
7402 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7405 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7407 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7408 &pAllocationsChanged[allocIndex] : VMA_NULL;
7409 pDefragmentator->AddAllocation(hAlloc, pChanged);
7413 VkResult result = VK_SUCCESS;
// Move budget defaults to "unlimited" unless the user constrains it.
7417 VkDeviceSize maxBytesToMove = SIZE_MAX;
7418 uint32_t maxAllocationsToMove = UINT32_MAX;
7419 if(pDefragmentationInfo != VMA_NULL)
// Defragment default vectors (HOST_VISIBLE types only), then pools.
7426 for(uint32_t memTypeIndex = 0;
7427 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7431 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7433 result = m_pBlockVectors[memTypeIndex]->Defragment(
7434 pDefragmentationStats,
7436 maxAllocationsToMove);
7441 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7443 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7444 pDefragmentationStats,
7446 maxAllocationsToMove);
// Destroy defragmentators in reverse creation order.
7452 for(
size_t poolIndex = poolCount; poolIndex--; )
7454 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7458 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7460 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7462 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for an allocation. For allocations that can become
// lost, this also "touches" the allocation: it atomically bumps the
// last-use frame index to the current frame via compare-exchange, reporting
// deviceMemory/offset as null/0 if the allocation is already lost.
// NOTE(review): gap-damaged text - the compare-exchange retry loop and some
// field assignments are missing; restore before compiling.
7469 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7471 if(hAllocation->CanBecomeLost())
7477 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7478 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/userdata but no backing memory.
7481 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7485 pAllocationInfo->
offset = 0;
7486 pAllocationInfo->
size = hAllocation->GetSize();
7488 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info without another CAS.
7491 else if(localLastUseFrameIndex == localCurrFrameIndex)
7493 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7494 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7495 pAllocationInfo->
offset = hAllocation->GetOffset();
7496 pAllocationInfo->
size = hAllocation->GetSize();
7498 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index atomically.
7503 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7505 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: report everything directly.
7512 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7513 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7514 pAllocationInfo->
offset = hAllocation->GetOffset();
7515 pAllocationInfo->
size = hAllocation->GetSize();
7516 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7517 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom pool: constructs a VmaPool_T, pre-creates its minimum
// block count, and registers it in the sorted pool list on success.
// NOTE(review): gap-damaged text - the newCreateInfo normalization (default
// block size etc.) and the final return are missing; restore before compiling.
7521 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7523 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7536 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-allocate minBlockCount blocks; destroy the half-built pool on failure.
7538 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7539 if(res != VK_SUCCESS)
7541 vma_delete(
this, *pPool);
// Register the pool (sorted by pointer) under the pools mutex.
7548 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7549 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7555 void VmaAllocator_T::DestroyPool(VmaPool pool)
7559 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7560 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7561 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7564 vma_delete(
this, pool);
7567 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7569 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically records the application's current frame index, which drives
// the lost-allocation bookkeeping (GetAllocationInfo / MakePoolAllocationsLost).
7572 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7574 m_CurrentFrameIndex.store(frameIndex);
7577 void VmaAllocator_T::MakePoolAllocationsLost(
7579 size_t* pLostAllocationCount)
7581 hPool->m_BlockVector.MakePoolAllocationsLost(
7582 m_CurrentFrameIndex.load(),
7583 pLostAllocationCount);
7586 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7588 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7589 (*pAllocation)->InitLost();
7592 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7594 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7597 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7599 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7600 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7602 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7603 if(res == VK_SUCCESS)
7605 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7610 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7615 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7618 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7620 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7626 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7628 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7630 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7633 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7635 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7636 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7638 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7639 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Block suballocations map the whole
// block (reference counted) and offset the returned pointer; dedicated
// allocations map their own memory. Lost-capable allocations cannot be
// mapped safely and are rejected.
// NOTE(review): gap-damaged text - braces, a return and the default case
// are missing; restore before compiling.
7643 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7645 if(hAllocation->CanBecomeLost())
7647 return VK_ERROR_MEMORY_MAP_FAILED;
7650 switch(hAllocation->GetType())
7652 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7654 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7655 char *pBytes =
nullptr;
7656 VkResult res = pBlock->Map(
this, (
void**)&pBytes);
7657 if(res == VK_SUCCESS)
// Offset the block base pointer to this suballocation and bump its map count.
7659 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7660 hAllocation->BlockAllocMap();
7664 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7665 return hAllocation->DedicatedAllocMap(
this, ppData);
7668 return VK_ERROR_MEMORY_MAP_FAILED;
7672 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7674 switch(hAllocation->GetType())
7676 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7678 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7679 hAllocation->BlockAllocUnmap();
7680 pBlock->Unmap(
this);
7683 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7684 hAllocation->DedicatedAllocUnmap(
this);
7691 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7693 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7695 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7697 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7698 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7699 VMA_ASSERT(pDedicatedAllocations);
7700 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7701 VMA_ASSERT(success);
7704 VkDeviceMemory hMemory = allocation->GetMemory();
7706 if(allocation->GetMappedData() != VMA_NULL)
7708 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7711 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7713 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Serializes the allocator's detailed state as JSON: per-type dedicated
// allocations, non-empty default block vectors, and custom pools.
// NOTE(review): gap-damaged text - BeginObject/EndObject/EndArray calls and
// several braces are missing, and the leading #if is fused onto the
// signature line; restore before compiling.
7716 #if VMA_STATS_STRING_ENABLED 7718 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// "DedicatedAllocations" section, emitted lazily on the first non-empty type.
7720 bool dedicatedAllocationsStarted =
false;
7721 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7723 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7724 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7725 VMA_ASSERT(pDedicatedAllocVector);
7726 if(pDedicatedAllocVector->empty() ==
false)
7728 if(dedicatedAllocationsStarted ==
false)
7730 dedicatedAllocationsStarted =
true;
7731 json.WriteString(
"DedicatedAllocations");
// Key "Type <index>" per memory type.
7735 json.BeginString(
"Type ");
7736 json.ContinueString(memTypeIndex);
7741 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7743 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7744 json.BeginObject(
true);
7746 json.WriteString(
"Type");
7747 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7749 json.WriteString(
"Size");
7750 json.WriteNumber(hAlloc->GetSize());
// User data is emitted as a string either verbatim or as a pointer value.
7752 const void* pUserData = hAlloc->GetUserData();
7753 if(pUserData != VMA_NULL)
7755 json.WriteString(
"UserData");
7756 if(hAlloc->IsUserDataString())
7758 json.WriteString((
const char*)pUserData);
7763 json.ContinueString_Pointer(pUserData);
7774 if(dedicatedAllocationsStarted)
// "DefaultPools" section for non-empty default block vectors.
7780 bool allocationsStarted =
false;
7781 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7783 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7785 if(allocationsStarted ==
false)
7787 allocationsStarted =
true;
7788 json.WriteString(
"DefaultPools");
7792 json.BeginString(
"Type ");
7793 json.ContinueString(memTypeIndex);
7796 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7799 if(allocationsStarted)
// "Pools" array with one entry per custom pool.
7806 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7807 const size_t poolCount = m_Pools.size();
7810 json.WriteString(
"Pools");
7812 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7814 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7821 #endif // #if VMA_STATS_STRING_ENABLED 7823 static VkResult AllocateMemoryForImage(
7824 VmaAllocator allocator,
7827 VmaSuballocationType suballocType,
7828 VmaAllocation* pAllocation)
7830 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7832 VkMemoryRequirements vkMemReq = {};
7833 bool requiresDedicatedAllocation =
false;
7834 bool prefersDedicatedAllocation =
false;
7835 allocator->GetImageMemoryRequirements(image, vkMemReq,
7836 requiresDedicatedAllocation, prefersDedicatedAllocation);
7838 return allocator->AllocateMemory(
7840 requiresDedicatedAllocation,
7841 prefersDedicatedAllocation,
7844 *pAllocationCreateInfo,
7854 VmaAllocator* pAllocator)
7856 VMA_ASSERT(pCreateInfo && pAllocator);
7857 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7863 VmaAllocator allocator)
7865 if(allocator != VK_NULL_HANDLE)
7867 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7868 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7869 vma_delete(&allocationCallbacks, allocator);
7874 VmaAllocator allocator,
7875 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7877 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7878 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7882 VmaAllocator allocator,
7883 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7885 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7886 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7890 VmaAllocator allocator,
7891 uint32_t memoryTypeIndex,
7892 VkMemoryPropertyFlags* pFlags)
7894 VMA_ASSERT(allocator && pFlags);
7895 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7896 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7900 VmaAllocator allocator,
7901 uint32_t frameIndex)
7903 VMA_ASSERT(allocator);
7904 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7906 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7908 allocator->SetCurrentFrameIndex(frameIndex);
7912 VmaAllocator allocator,
7915 VMA_ASSERT(allocator && pStats);
7916 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7917 allocator->CalculateStats(pStats);
7920 #if VMA_STATS_STRING_ENABLED 7923 VmaAllocator allocator,
7924 char** ppStatsString,
7925 VkBool32 detailedMap)
7927 VMA_ASSERT(allocator && ppStatsString);
7928 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7930 VmaStringBuilder sb(allocator);
7932 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7936 allocator->CalculateStats(&stats);
7938 json.WriteString(
"Total");
7939 VmaPrintStatInfo(json, stats.
total);
7941 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7943 json.BeginString(
"Heap ");
7944 json.ContinueString(heapIndex);
7948 json.WriteString(
"Size");
7949 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7951 json.WriteString(
"Flags");
7952 json.BeginArray(
true);
7953 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7955 json.WriteString(
"DEVICE_LOCAL");
7961 json.WriteString(
"Stats");
7962 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7965 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7967 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7969 json.BeginString(
"Type ");
7970 json.ContinueString(typeIndex);
7975 json.WriteString(
"Flags");
7976 json.BeginArray(
true);
7977 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7978 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7980 json.WriteString(
"DEVICE_LOCAL");
7982 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7984 json.WriteString(
"HOST_VISIBLE");
7986 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7988 json.WriteString(
"HOST_COHERENT");
7990 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7992 json.WriteString(
"HOST_CACHED");
7994 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7996 json.WriteString(
"LAZILY_ALLOCATED");
8002 json.WriteString(
"Stats");
8003 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8012 if(detailedMap == VK_TRUE)
8014 allocator->PrintDetailedMap(json);
8020 const size_t len = sb.GetLength();
8021 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8024 memcpy(pChars, sb.GetData(), len);
8027 *ppStatsString = pChars;
8031 VmaAllocator allocator,
8034 if(pStatsString != VMA_NULL)
8036 VMA_ASSERT(allocator);
8037 size_t len = strlen(pStatsString);
8038 vma_delete_array(allocator, pStatsString, len + 1);
8042 #endif // #if VMA_STATS_STRING_ENABLED 8048 VmaAllocator allocator,
8049 uint32_t memoryTypeBits,
8051 uint32_t* pMemoryTypeIndex)
8053 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8054 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8055 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8062 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8066 switch(pAllocationCreateInfo->
usage)
8071 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8074 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8077 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8078 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8081 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8082 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8088 *pMemoryTypeIndex = UINT32_MAX;
8089 uint32_t minCost = UINT32_MAX;
8090 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8091 memTypeIndex < allocator->GetMemoryTypeCount();
8092 ++memTypeIndex, memTypeBit <<= 1)
8095 if((memTypeBit & memoryTypeBits) != 0)
8097 const VkMemoryPropertyFlags currFlags =
8098 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8100 if((requiredFlags & ~currFlags) == 0)
8103 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8105 if(currCost < minCost)
8107 *pMemoryTypeIndex = memTypeIndex;
8117 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8121 VmaAllocator allocator,
8125 VMA_ASSERT(allocator && pCreateInfo && pPool);
8127 VMA_DEBUG_LOG(
"vmaCreatePool");
8129 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8131 return allocator->CreatePool(pCreateInfo, pPool);
8135 VmaAllocator allocator,
8138 VMA_ASSERT(allocator);
8140 if(pool == VK_NULL_HANDLE)
8145 VMA_DEBUG_LOG(
"vmaDestroyPool");
8147 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8149 allocator->DestroyPool(pool);
8153 VmaAllocator allocator,
8157 VMA_ASSERT(allocator && pool && pPoolStats);
8159 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8161 allocator->GetPoolStats(pool, pPoolStats);
8165 VmaAllocator allocator,
8167 size_t* pLostAllocationCount)
8169 VMA_ASSERT(allocator && pool);
8171 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8173 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8177 VmaAllocator allocator,
8178 const VkMemoryRequirements* pVkMemoryRequirements,
8180 VmaAllocation* pAllocation,
8183 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8185 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8187 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8189 VkResult result = allocator->AllocateMemory(
8190 *pVkMemoryRequirements,
8196 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8199 if(pAllocationInfo && result == VK_SUCCESS)
8201 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8208 VmaAllocator allocator,
8211 VmaAllocation* pAllocation,
8214 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8216 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8218 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8220 VkMemoryRequirements vkMemReq = {};
8221 bool requiresDedicatedAllocation =
false;
8222 bool prefersDedicatedAllocation =
false;
8223 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8224 requiresDedicatedAllocation,
8225 prefersDedicatedAllocation);
8227 VkResult result = allocator->AllocateMemory(
8229 requiresDedicatedAllocation,
8230 prefersDedicatedAllocation,
8234 VMA_SUBALLOCATION_TYPE_BUFFER,
8237 if(pAllocationInfo && result == VK_SUCCESS)
8239 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8246 VmaAllocator allocator,
8249 VmaAllocation* pAllocation,
8252 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8254 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8256 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8258 VkResult result = AllocateMemoryForImage(
8262 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8265 if(pAllocationInfo && result == VK_SUCCESS)
8267 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8274 VmaAllocator allocator,
8275 VmaAllocation allocation)
8277 VMA_ASSERT(allocator && allocation);
8279 VMA_DEBUG_LOG(
"vmaFreeMemory");
8281 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8283 allocator->FreeMemory(allocation);
8287 VmaAllocator allocator,
8288 VmaAllocation allocation,
8291 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8293 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8295 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8299 VmaAllocator allocator,
8300 VmaAllocation allocation,
8303 VMA_ASSERT(allocator && allocation);
8305 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8307 allocation->SetUserData(allocator, pUserData);
8311 VmaAllocator allocator,
8312 VmaAllocation* pAllocation)
8314 VMA_ASSERT(allocator && pAllocation);
8316 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8318 allocator->CreateLostAllocation(pAllocation);
8322 VmaAllocator allocator,
8323 VmaAllocation allocation,
8326 VMA_ASSERT(allocator && allocation && ppData);
8328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8330 return allocator->Map(allocation, ppData);
8334 VmaAllocator allocator,
8335 VmaAllocation allocation)
8337 VMA_ASSERT(allocator && allocation);
8339 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8341 allocator->Unmap(allocation);
8345 VmaAllocator allocator,
8346 VmaAllocation* pAllocations,
8347 size_t allocationCount,
8348 VkBool32* pAllocationsChanged,
8352 VMA_ASSERT(allocator && pAllocations);
8354 VMA_DEBUG_LOG(
"vmaDefragment");
8356 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8358 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8362 VmaAllocator allocator,
8363 const VkBufferCreateInfo* pBufferCreateInfo,
8366 VmaAllocation* pAllocation,
8369 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8371 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8373 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8375 *pBuffer = VK_NULL_HANDLE;
8376 *pAllocation = VK_NULL_HANDLE;
8379 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8380 allocator->m_hDevice,
8382 allocator->GetAllocationCallbacks(),
8387 VkMemoryRequirements vkMemReq = {};
8388 bool requiresDedicatedAllocation =
false;
8389 bool prefersDedicatedAllocation =
false;
8390 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8391 requiresDedicatedAllocation, prefersDedicatedAllocation);
8395 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8397 VMA_ASSERT(vkMemReq.alignment %
8398 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8400 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8402 VMA_ASSERT(vkMemReq.alignment %
8403 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8405 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8407 VMA_ASSERT(vkMemReq.alignment %
8408 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8412 res = allocator->AllocateMemory(
8414 requiresDedicatedAllocation,
8415 prefersDedicatedAllocation,
8418 *pAllocationCreateInfo,
8419 VMA_SUBALLOCATION_TYPE_BUFFER,
8424 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8425 allocator->m_hDevice,
8427 (*pAllocation)->GetMemory(),
8428 (*pAllocation)->GetOffset());
8432 if(pAllocationInfo != VMA_NULL)
8434 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8438 allocator->FreeMemory(*pAllocation);
8439 *pAllocation = VK_NULL_HANDLE;
8440 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8441 *pBuffer = VK_NULL_HANDLE;
8444 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8445 *pBuffer = VK_NULL_HANDLE;
8452 VmaAllocator allocator,
8454 VmaAllocation allocation)
8456 if(buffer != VK_NULL_HANDLE)
8458 VMA_ASSERT(allocator);
8460 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8462 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8464 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8466 allocator->FreeMemory(allocation);
8471 VmaAllocator allocator,
8472 const VkImageCreateInfo* pImageCreateInfo,
8475 VmaAllocation* pAllocation,
8478 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8480 VMA_DEBUG_LOG(
"vmaCreateImage");
8482 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8484 *pImage = VK_NULL_HANDLE;
8485 *pAllocation = VK_NULL_HANDLE;
8488 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8489 allocator->m_hDevice,
8491 allocator->GetAllocationCallbacks(),
8495 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8496 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8497 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8500 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8504 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8505 allocator->m_hDevice,
8507 (*pAllocation)->GetMemory(),
8508 (*pAllocation)->GetOffset());
8512 if(pAllocationInfo != VMA_NULL)
8514 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8518 allocator->FreeMemory(*pAllocation);
8519 *pAllocation = VK_NULL_HANDLE;
8520 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8521 *pImage = VK_NULL_HANDLE;
8524 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8525 *pImage = VK_NULL_HANDLE;
8532 VmaAllocator allocator,
8534 VmaAllocation allocation)
8536 if(image != VK_NULL_HANDLE)
8538 VMA_ASSERT(allocator);
8540 VMA_DEBUG_LOG(
"vmaDestroyImage");
8542 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8544 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8546 allocator->FreeMemory(allocation);
8550 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:758
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1009
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:783
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:768
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:968
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:762
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1277
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:780
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1443
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1147
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1201
Definition: vk_mem_alloc.h:1046
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:751
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1084
Definition: vk_mem_alloc.h:993
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:792
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:845
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:777
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:997
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:910
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:765
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:909
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:773
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1447
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:809
VmaStatInfo total
Definition: vk_mem_alloc.h:919
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1455
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1068
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1438
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:766
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:693
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:786
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1155
Definition: vk_mem_alloc.h:1149
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1287
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:763
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1105
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1171
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1207
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:749
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1158
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:947
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1433
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1451
Definition: vk_mem_alloc.h:984
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1092
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:764
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:915
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:699
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:720
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:725
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1453
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1079
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1217
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:759
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:898
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1166
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:712
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1053
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:911
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:716
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1161
Definition: vk_mem_alloc.h:992
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1074
Definition: vk_mem_alloc.h:1065
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:901
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:761
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1179
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:795
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1210
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1063
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1098
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:833
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:917
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1033
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:910
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:770
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:714
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:769
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1193
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1301
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:789
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:910
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:907
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1198
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1282
Definition: vk_mem_alloc.h:1061
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1449
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:757
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:772
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:905
Definition: vk_mem_alloc.h:952
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1151
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:903
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:767
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:771
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1020
Definition: vk_mem_alloc.h:977
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1296
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:747
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:760
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1263
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1129
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:911
Definition: vk_mem_alloc.h:1059
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:918
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1204
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:911
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1268