23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 869 #include <vulkan/vulkan.h> 871 VK_DEFINE_HANDLE(VmaAllocator)
875 VmaAllocator allocator,
877 VkDeviceMemory memory,
881 VmaAllocator allocator,
883 VkDeviceMemory memory,
1032 VmaAllocator* pAllocator);
1036 VmaAllocator allocator);
1043 VmaAllocator allocator,
1044 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1051 VmaAllocator allocator,
1052 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1061 VmaAllocator allocator,
1062 uint32_t memoryTypeIndex,
1063 VkMemoryPropertyFlags* pFlags);
1074 VmaAllocator allocator,
1075 uint32_t frameIndex);
1105 VmaAllocator allocator,
1108 #define VMA_STATS_STRING_ENABLED 1 1110 #if VMA_STATS_STRING_ENABLED 1116 VmaAllocator allocator,
1117 char** ppStatsString,
1118 VkBool32 detailedMap);
1121 VmaAllocator allocator,
1122 char* pStatsString);
1124 #endif // #if VMA_STATS_STRING_ENABLED 1126 VK_DEFINE_HANDLE(VmaPool)
1309 VmaAllocator allocator,
1310 uint32_t memoryTypeBits,
1312 uint32_t* pMemoryTypeIndex);
1327 VmaAllocator allocator,
1328 const VkBufferCreateInfo* pBufferCreateInfo,
1330 uint32_t* pMemoryTypeIndex);
1345 VmaAllocator allocator,
1346 const VkImageCreateInfo* pImageCreateInfo,
1348 uint32_t* pMemoryTypeIndex);
1449 VmaAllocator allocator,
1456 VmaAllocator allocator,
1466 VmaAllocator allocator,
1477 VmaAllocator allocator,
1479 size_t* pLostAllocationCount);
1481 VK_DEFINE_HANDLE(VmaAllocation)
1537 VmaAllocator allocator,
1538 const VkMemoryRequirements* pVkMemoryRequirements,
1540 VmaAllocation* pAllocation,
1550 VmaAllocator allocator,
1553 VmaAllocation* pAllocation,
1558 VmaAllocator allocator,
1561 VmaAllocation* pAllocation,
1566 VmaAllocator allocator,
1567 VmaAllocation allocation);
1586 VmaAllocator allocator,
1587 VmaAllocation allocation,
1605 VmaAllocator allocator,
1606 VmaAllocation allocation);
1622 VmaAllocator allocator,
1623 VmaAllocation allocation,
1637 VmaAllocator allocator,
1638 VmaAllocation* pAllocation);
1675 VmaAllocator allocator,
1676 VmaAllocation allocation,
1684 VmaAllocator allocator,
1685 VmaAllocation allocation);
1796 VmaAllocator allocator,
1797 VmaAllocation* pAllocations,
1798 size_t allocationCount,
1799 VkBool32* pAllocationsChanged,
1830 VmaAllocator allocator,
1831 const VkBufferCreateInfo* pBufferCreateInfo,
1834 VmaAllocation* pAllocation,
1849 VmaAllocator allocator,
1851 VmaAllocation allocation);
1855 VmaAllocator allocator,
1856 const VkImageCreateInfo* pImageCreateInfo,
1859 VmaAllocation* pAllocation,
1874 VmaAllocator allocator,
1876 VmaAllocation allocation);
1882 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1885 #ifdef __INTELLISENSE__ 1886 #define VMA_IMPLEMENTATION 1889 #ifdef VMA_IMPLEMENTATION 1890 #undef VMA_IMPLEMENTATION 1912 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1913 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1925 #if VMA_USE_STL_CONTAINERS 1926 #define VMA_USE_STL_VECTOR 1 1927 #define VMA_USE_STL_UNORDERED_MAP 1 1928 #define VMA_USE_STL_LIST 1 1931 #if VMA_USE_STL_VECTOR 1935 #if VMA_USE_STL_UNORDERED_MAP 1936 #include <unordered_map> 1939 #if VMA_USE_STL_LIST 1948 #include <algorithm> 1952 #if !defined(_WIN32) && !defined(__APPLE__) 1958 #define VMA_NULL nullptr 1961 #if defined(__APPLE__) || defined(__ANDROID__) 1963 void *aligned_alloc(
size_t alignment,
size_t size)
1966 if(alignment <
sizeof(
void*))
1968 alignment =
sizeof(
void*);
1972 if(posix_memalign(&pointer, alignment, size) == 0)
1981 #define VMA_ASSERT(expr) assert(expr) 1983 #define VMA_ASSERT(expr) 1989 #ifndef VMA_HEAVY_ASSERT 1991 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1993 #define VMA_HEAVY_ASSERT(expr) 1997 #ifndef VMA_ALIGN_OF 1998 #define VMA_ALIGN_OF(type) (__alignof(type)) 2001 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2003 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2005 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2009 #ifndef VMA_SYSTEM_FREE 2011 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2013 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2018 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2022 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2026 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2030 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2033 #ifndef VMA_DEBUG_LOG 2034 #define VMA_DEBUG_LOG(format, ...) 2044 #if VMA_STATS_STRING_ENABLED 2045 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2047 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of `num` into `outStr`.
// At most `strLen` bytes are written; snprintf guarantees NUL
// termination (truncating if the buffer is too small).
// Used when building the JSON statistics string.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats the pointer value `ptr` into `outStr` (at most `strLen`
// bytes, NUL-terminated by snprintf). The exact text is whatever the
// platform's "%p" conversion produces — implementation-defined, but
// stable within one process. Used by the statistics-string builder.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2065 void Lock() { m_Mutex.lock(); }
2066 void Unlock() { m_Mutex.unlock(); }
2070 #define VMA_MUTEX VmaMutex 2081 #ifndef VMA_ATOMIC_UINT32 2082 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2085 #ifndef VMA_BEST_FIT 2098 #define VMA_BEST_FIT (1) 2101 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2106 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2109 #ifndef VMA_DEBUG_ALIGNMENT 2114 #define VMA_DEBUG_ALIGNMENT (1) 2117 #ifndef VMA_DEBUG_MARGIN 2122 #define VMA_DEBUG_MARGIN (0) 2125 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2130 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2133 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2138 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2141 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2142 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2146 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2147 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2151 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2157 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2158 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (Hamming weight /
// popcount), computed with the classic branch-free parallel
// reduction: pairwise 2-bit sums, then 4-, 8-, 16- and 32-bit sums.
// Fixed: the trailing `return c;` was missing, making the function
// fall off the end without returning the computed count.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);       // 2-bit partial sums
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // 4-bit sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                // 8-bit sums
    c = ((c >> 8) + c) & 0x00FF00FF;                // 16-bit sums
    c = ((c >> 16) + c) & 0x0000FFFF;               // final 32-bit sum
    return c;
}
// Rounds `val` up to the nearest multiple of `align`.
// `align` must be non-zero; a value that is already a multiple of
// `align` is returned unchanged. Intended for unsigned integral T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped - bumped % align;
}
// Division with rounding to nearest integer: adds half of the
// divisor before the truncating division. Intended for unsigned
// integral T (halves round up, e.g. 7/2 -> 4).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / static_cast<T>(2);
    return (x + half) / y;
}
// One partition step for VmaQuickSort (Lomuto scheme): the last
// element of [beg, end) is the pivot. Every element for which
// cmp(elem, pivot) holds is moved in front of the pivot's final
// position, which is returned.
// Fixed: the `++insertIndex;` advance and the `return insertIndex;`
// statement were missing, which broke the partition invariant and
// left the function without a return value.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;                // next slot for a "small" element
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Place the pivot between the two partitions.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
2211 template<
typename Iterator,
typename Compare>
2212 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2216 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2217 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2218 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2222 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2224 #endif // #ifndef VMA_SORT 2233 static inline bool VmaBlocksOnSamePage(
2234 VkDeviceSize resourceAOffset,
2235 VkDeviceSize resourceASize,
2236 VkDeviceSize resourceBOffset,
2237 VkDeviceSize pageSize)
2239 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2240 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2241 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2242 VkDeviceSize resourceBStart = resourceBOffset;
2243 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2244 return resourceAEndPage == resourceBStartPage;
2247 enum VmaSuballocationType
2249 VMA_SUBALLOCATION_TYPE_FREE = 0,
2250 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
2251 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
2252 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
2253 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
2254 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
2255 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
2264 static inline bool VmaIsBufferImageGranularityConflict(
2265 VmaSuballocationType suballocType1,
2266 VmaSuballocationType suballocType2)
2268 if(suballocType1 > suballocType2)
2270 VMA_SWAP(suballocType1, suballocType2);
2273 switch(suballocType1)
2275 case VMA_SUBALLOCATION_TYPE_FREE:
2277 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2279 case VMA_SUBALLOCATION_TYPE_BUFFER:
2281 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2282 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2283 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2285 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2286 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2287 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2288 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2290 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2291 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2303 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2304 m_pMutex(useMutex ? &mutex : VMA_NULL)
2321 VMA_MUTEX* m_pMutex;
2324 #if VMA_DEBUG_GLOBAL_MUTEX 2325 static VMA_MUTEX gDebugGlobalMutex;
2326 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2328 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2332 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator
// to the first element for which cmp(element, key) is false — i.e.
// the first element not less than key — or `end` if there is none.
// Same contract as std::lower_bound.
// Fixed: the `while(down < up)` loop condition, both branch bodies
// and the final `return beg + down;` were missing, leaving only the
// midpoint computation.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            // Element at mid is still less than key: search above it.
            down = mid + 1;
        }
        else
        {
            // Element at mid is a candidate: keep it in the range.
            up = mid;
        }
    }
    return beg + down;
}
2365 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2367 if((pAllocationCallbacks != VMA_NULL) &&
2368 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2370 return (*pAllocationCallbacks->pfnAllocation)(
2371 pAllocationCallbacks->pUserData,
2374 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2378 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2382 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2384 if((pAllocationCallbacks != VMA_NULL) &&
2385 (pAllocationCallbacks->pfnFree != VMA_NULL))
2387 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2391 VMA_SYSTEM_FREE(ptr);
2395 template<
typename T>
2396 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2398 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2401 template<
typename T>
2402 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2404 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2407 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2409 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2411 template<
typename T>
2412 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2415 VmaFree(pAllocationCallbacks, ptr);
2418 template<
typename T>
2419 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2423 for(
size_t i = count; i--; )
2427 VmaFree(pAllocationCallbacks, ptr);
2432 template<
typename T>
2433 class VmaStlAllocator
2436 const VkAllocationCallbacks*
const m_pCallbacks;
2437 typedef T value_type;
2439 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2440 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2442 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2443 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2445 template<
typename U>
2446 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2448 return m_pCallbacks == rhs.m_pCallbacks;
2450 template<
typename U>
2451 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2453 return m_pCallbacks != rhs.m_pCallbacks;
2456 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2459 #if VMA_USE_STL_VECTOR 2461 #define VmaVector std::vector 2463 template<
typename T,
typename allocatorT>
2464 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2466 vec.insert(vec.begin() + index, item);
2469 template<
typename T,
typename allocatorT>
2470 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2472 vec.erase(vec.begin() + index);
2475 #else // #if VMA_USE_STL_VECTOR 2480 template<
typename T,
typename AllocatorT>
2484 typedef T value_type;
2486 VmaVector(
const AllocatorT& allocator) :
2487 m_Allocator(allocator),
2494 VmaVector(
size_t count,
const AllocatorT& allocator) :
2495 m_Allocator(allocator),
2496 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2502 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2503 m_Allocator(src.m_Allocator),
2504 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2505 m_Count(src.m_Count),
2506 m_Capacity(src.m_Count)
2510 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2516 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2519 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2523 resize(rhs.m_Count);
2526 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2532 bool empty()
const {
return m_Count == 0; }
2533 size_t size()
const {
return m_Count; }
2534 T* data() {
return m_pArray; }
2535 const T* data()
const {
return m_pArray; }
2537 T& operator[](
size_t index)
2539 VMA_HEAVY_ASSERT(index < m_Count);
2540 return m_pArray[index];
2542 const T& operator[](
size_t index)
const 2544 VMA_HEAVY_ASSERT(index < m_Count);
2545 return m_pArray[index];
2550 VMA_HEAVY_ASSERT(m_Count > 0);
2553 const T& front()
const 2555 VMA_HEAVY_ASSERT(m_Count > 0);
2560 VMA_HEAVY_ASSERT(m_Count > 0);
2561 return m_pArray[m_Count - 1];
2563 const T& back()
const 2565 VMA_HEAVY_ASSERT(m_Count > 0);
2566 return m_pArray[m_Count - 1];
2569 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2571 newCapacity = VMA_MAX(newCapacity, m_Count);
2573 if((newCapacity < m_Capacity) && !freeMemory)
2575 newCapacity = m_Capacity;
2578 if(newCapacity != m_Capacity)
2580 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2583 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2585 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2586 m_Capacity = newCapacity;
2587 m_pArray = newArray;
2591 void resize(
size_t newCount,
bool freeMemory =
false)
2593 size_t newCapacity = m_Capacity;
2594 if(newCount > m_Capacity)
2596 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2600 newCapacity = newCount;
2603 if(newCapacity != m_Capacity)
2605 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2606 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2607 if(elementsToCopy != 0)
2609 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2611 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2612 m_Capacity = newCapacity;
2613 m_pArray = newArray;
2619 void clear(
bool freeMemory =
false)
2621 resize(0, freeMemory);
2624 void insert(
size_t index,
const T& src)
2626 VMA_HEAVY_ASSERT(index <= m_Count);
2627 const size_t oldCount = size();
2628 resize(oldCount + 1);
2629 if(index < oldCount)
2631 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2633 m_pArray[index] = src;
2636 void remove(
size_t index)
2638 VMA_HEAVY_ASSERT(index < m_Count);
2639 const size_t oldCount = size();
2640 if(index < oldCount - 1)
2642 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2644 resize(oldCount - 1);
2647 void push_back(
const T& src)
2649 const size_t newIndex = size();
2650 resize(newIndex + 1);
2651 m_pArray[newIndex] = src;
2656 VMA_HEAVY_ASSERT(m_Count > 0);
2660 void push_front(
const T& src)
2667 VMA_HEAVY_ASSERT(m_Count > 0);
2671 typedef T* iterator;
2673 iterator begin() {
return m_pArray; }
2674 iterator end() {
return m_pArray + m_Count; }
2677 AllocatorT m_Allocator;
2683 template<
typename T,
typename allocatorT>
2684 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2686 vec.insert(index, item);
2689 template<
typename T,
typename allocatorT>
2690 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2695 #endif // #if VMA_USE_STL_VECTOR 2697 template<
typename CmpLess,
typename VectorT>
2698 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2700 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2702 vector.data() + vector.size(),
2704 CmpLess()) - vector.data();
2705 VmaVectorInsert(vector, indexToInsert, value);
2706 return indexToInsert;
2709 template<
typename CmpLess,
typename VectorT>
2710 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2713 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2718 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2720 size_t indexToRemove = it - vector.begin();
2721 VmaVectorRemove(vector, indexToRemove);
2727 template<
typename CmpLess,
typename VectorT>
2728 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2731 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2733 vector.data() + vector.size(),
2736 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2738 return it - vector.begin();
2742 return vector.size();
2754 template<
typename T>
2755 class VmaPoolAllocator
2758 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2759 ~VmaPoolAllocator();
2767 uint32_t NextFreeIndex;
2774 uint32_t FirstFreeIndex;
2777 const VkAllocationCallbacks* m_pAllocationCallbacks;
2778 size_t m_ItemsPerBlock;
2779 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2781 ItemBlock& CreateNewBlock();
2784 template<
typename T>
2785 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2786 m_pAllocationCallbacks(pAllocationCallbacks),
2787 m_ItemsPerBlock(itemsPerBlock),
2788 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2790 VMA_ASSERT(itemsPerBlock > 0);
2793 template<
typename T>
2794 VmaPoolAllocator<T>::~VmaPoolAllocator()
2799 template<
typename T>
2800 void VmaPoolAllocator<T>::Clear()
2802 for(
size_t i = m_ItemBlocks.size(); i--; )
2803 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2804 m_ItemBlocks.clear();
2807 template<
typename T>
2808 T* VmaPoolAllocator<T>::Alloc()
2810 for(
size_t i = m_ItemBlocks.size(); i--; )
2812 ItemBlock& block = m_ItemBlocks[i];
2814 if(block.FirstFreeIndex != UINT32_MAX)
2816 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2817 block.FirstFreeIndex = pItem->NextFreeIndex;
2818 return &pItem->Value;
2823 ItemBlock& newBlock = CreateNewBlock();
2824 Item*
const pItem = &newBlock.pItems[0];
2825 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2826 return &pItem->Value;
2829 template<
typename T>
2830 void VmaPoolAllocator<T>::Free(T* ptr)
2833 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2835 ItemBlock& block = m_ItemBlocks[i];
2839 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2842 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2844 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2845 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2846 block.FirstFreeIndex = index;
2850 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2853 template<
typename T>
2854 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2856 ItemBlock newBlock = {
2857 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2859 m_ItemBlocks.push_back(newBlock);
2862 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2863 newBlock.pItems[i].NextFreeIndex = i + 1;
2864 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2865 return m_ItemBlocks.back();
2871 #if VMA_USE_STL_LIST 2873 #define VmaList std::list 2875 #else // #if VMA_USE_STL_LIST 2877 template<
typename T>
2886 template<
typename T>
2890 typedef VmaListItem<T> ItemType;
2892 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2896 size_t GetCount()
const {
return m_Count; }
2897 bool IsEmpty()
const {
return m_Count == 0; }
2899 ItemType* Front() {
return m_pFront; }
2900 const ItemType* Front()
const {
return m_pFront; }
2901 ItemType* Back() {
return m_pBack; }
2902 const ItemType* Back()
const {
return m_pBack; }
2904 ItemType* PushBack();
2905 ItemType* PushFront();
2906 ItemType* PushBack(
const T& value);
2907 ItemType* PushFront(
const T& value);
2912 ItemType* InsertBefore(ItemType* pItem);
2914 ItemType* InsertAfter(ItemType* pItem);
2916 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2917 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2919 void Remove(ItemType* pItem);
2922 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2923 VmaPoolAllocator<ItemType> m_ItemAllocator;
2929 VmaRawList(
const VmaRawList<T>& src);
2930 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2933 template<
typename T>
2934 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2935 m_pAllocationCallbacks(pAllocationCallbacks),
2936 m_ItemAllocator(pAllocationCallbacks, 128),
2943 template<
typename T>
2944 VmaRawList<T>::~VmaRawList()
2950 template<
typename T>
2951 void VmaRawList<T>::Clear()
2953 if(IsEmpty() ==
false)
2955 ItemType* pItem = m_pBack;
2956 while(pItem != VMA_NULL)
2958 ItemType*
const pPrevItem = pItem->pPrev;
2959 m_ItemAllocator.Free(pItem);
2962 m_pFront = VMA_NULL;
2968 template<
typename T>
2969 VmaListItem<T>* VmaRawList<T>::PushBack()
2971 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2972 pNewItem->pNext = VMA_NULL;
2975 pNewItem->pPrev = VMA_NULL;
2976 m_pFront = pNewItem;
2982 pNewItem->pPrev = m_pBack;
2983 m_pBack->pNext = pNewItem;
2990 template<
typename T>
2991 VmaListItem<T>* VmaRawList<T>::PushFront()
2993 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2994 pNewItem->pPrev = VMA_NULL;
2997 pNewItem->pNext = VMA_NULL;
2998 m_pFront = pNewItem;
3004 pNewItem->pNext = m_pFront;
3005 m_pFront->pPrev = pNewItem;
3006 m_pFront = pNewItem;
3012 template<
typename T>
3013 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3015 ItemType*
const pNewItem = PushBack();
3016 pNewItem->Value = value;
3020 template<
typename T>
3021 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3023 ItemType*
const pNewItem = PushFront();
3024 pNewItem->Value = value;
3028 template<
typename T>
3029 void VmaRawList<T>::PopBack()
3031 VMA_HEAVY_ASSERT(m_Count > 0);
3032 ItemType*
const pBackItem = m_pBack;
3033 ItemType*
const pPrevItem = pBackItem->pPrev;
3034 if(pPrevItem != VMA_NULL)
3036 pPrevItem->pNext = VMA_NULL;
3038 m_pBack = pPrevItem;
3039 m_ItemAllocator.Free(pBackItem);
3043 template<
typename T>
3044 void VmaRawList<T>::PopFront()
3046 VMA_HEAVY_ASSERT(m_Count > 0);
3047 ItemType*
const pFrontItem = m_pFront;
3048 ItemType*
const pNextItem = pFrontItem->pNext;
3049 if(pNextItem != VMA_NULL)
3051 pNextItem->pPrev = VMA_NULL;
3053 m_pFront = pNextItem;
3054 m_ItemAllocator.Free(pFrontItem);
3058 template<
typename T>
3059 void VmaRawList<T>::Remove(ItemType* pItem)
3061 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3062 VMA_HEAVY_ASSERT(m_Count > 0);
3064 if(pItem->pPrev != VMA_NULL)
3066 pItem->pPrev->pNext = pItem->pNext;
3070 VMA_HEAVY_ASSERT(m_pFront == pItem);
3071 m_pFront = pItem->pNext;
3074 if(pItem->pNext != VMA_NULL)
3076 pItem->pNext->pPrev = pItem->pPrev;
3080 VMA_HEAVY_ASSERT(m_pBack == pItem);
3081 m_pBack = pItem->pPrev;
3084 m_ItemAllocator.Free(pItem);
3088 template<
typename T>
3089 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3091 if(pItem != VMA_NULL)
3093 ItemType*
const prevItem = pItem->pPrev;
3094 ItemType*
const newItem = m_ItemAllocator.Alloc();
3095 newItem->pPrev = prevItem;
3096 newItem->pNext = pItem;
3097 pItem->pPrev = newItem;
3098 if(prevItem != VMA_NULL)
3100 prevItem->pNext = newItem;
3104 VMA_HEAVY_ASSERT(m_pFront == pItem);
3114 template<
typename T>
3115 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3117 if(pItem != VMA_NULL)
3119 ItemType*
const nextItem = pItem->pNext;
3120 ItemType*
const newItem = m_ItemAllocator.Alloc();
3121 newItem->pNext = nextItem;
3122 newItem->pPrev = pItem;
3123 pItem->pNext = newItem;
3124 if(nextItem != VMA_NULL)
3126 nextItem->pPrev = newItem;
3130 VMA_HEAVY_ASSERT(m_pBack == pItem);
3140 template<
typename T>
3141 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3143 ItemType*
const newItem = InsertBefore(pItem);
3144 newItem->Value = value;
3148 template<
typename T>
3149 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3151 ItemType*
const newItem = InsertAfter(pItem);
3152 newItem->Value = value;
3156 template<
typename T,
typename AllocatorT>
3169 T& operator*()
const 3171 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3172 return m_pItem->Value;
3174 T* operator->()
const 3176 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3177 return &m_pItem->Value;
3180 iterator& operator++()
3182 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3183 m_pItem = m_pItem->pNext;
3186 iterator& operator--()
3188 if(m_pItem != VMA_NULL)
3190 m_pItem = m_pItem->pPrev;
3194 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3195 m_pItem = m_pList->Back();
3200 iterator operator++(
int)
3202 iterator result = *
this;
3206 iterator operator--(
int)
3208 iterator result = *
this;
3213 bool operator==(
const iterator& rhs)
const 3215 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3216 return m_pItem == rhs.m_pItem;
3218 bool operator!=(
const iterator& rhs)
const 3220 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3221 return m_pItem != rhs.m_pItem;
3225 VmaRawList<T>* m_pList;
3226 VmaListItem<T>* m_pItem;
3228 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3234 friend class VmaList<T, AllocatorT>;
3237 class const_iterator
3246 const_iterator(
const iterator& src) :
3247 m_pList(src.m_pList),
3248 m_pItem(src.m_pItem)
3252 const T& operator*()
const 3254 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3255 return m_pItem->Value;
3257 const T* operator->()
const 3259 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3260 return &m_pItem->Value;
3263 const_iterator& operator++()
3265 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3266 m_pItem = m_pItem->pNext;
3269 const_iterator& operator--()
3271 if(m_pItem != VMA_NULL)
3273 m_pItem = m_pItem->pPrev;
3277 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3278 m_pItem = m_pList->Back();
3283 const_iterator operator++(
int)
3285 const_iterator result = *
this;
3289 const_iterator operator--(
int)
3291 const_iterator result = *
this;
3296 bool operator==(
const const_iterator& rhs)
const 3298 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3299 return m_pItem == rhs.m_pItem;
3301 bool operator!=(
const const_iterator& rhs)
const 3303 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3304 return m_pItem != rhs.m_pItem;
3308 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3314 const VmaRawList<T>* m_pList;
3315 const VmaListItem<T>* m_pItem;
3317 friend class VmaList<T, AllocatorT>;
3320 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3322 bool empty()
const {
return m_RawList.IsEmpty(); }
3323 size_t size()
const {
return m_RawList.GetCount(); }
3325 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3326 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3328 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3329 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3331 void clear() { m_RawList.Clear(); }
3332 void push_back(
const T& value) { m_RawList.PushBack(value); }
3333 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3334 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3337 VmaRawList<T> m_RawList;
3340 #endif // #if VMA_USE_STL_LIST 3348 #if VMA_USE_STL_UNORDERED_MAP 3350 #define VmaPair std::pair 3352 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3353 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3355 #else // #if VMA_USE_STL_UNORDERED_MAP 3357 template<
typename T1,
typename T2>
3363 VmaPair() : first(), second() { }
3364 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3370 template<
typename KeyT,
typename ValueT>
3374 typedef VmaPair<KeyT, ValueT> PairType;
3375 typedef PairType* iterator;
3377 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3379 iterator begin() {
return m_Vector.begin(); }
3380 iterator end() {
return m_Vector.end(); }
3382 void insert(
const PairType& pair);
3383 iterator find(
const KeyT& key);
3384 void erase(iterator it);
3387 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3390 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3392 template<
typename FirstT,
typename SecondT>
3393 struct VmaPairFirstLess
3395 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3397 return lhs.first < rhs.first;
3399 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3401 return lhs.first < rhsFirst;
3405 template<
typename KeyT,
typename ValueT>
3406 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3408 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3410 m_Vector.data() + m_Vector.size(),
3412 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3413 VmaVectorInsert(m_Vector, indexToInsert, pair);
3416 template<
typename KeyT,
typename ValueT>
3417 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3419 PairType* it = VmaBinaryFindFirstNotLess(
3421 m_Vector.data() + m_Vector.size(),
3423 VmaPairFirstLess<KeyT, ValueT>());
3424 if((it != m_Vector.end()) && (it->first == key))
3430 return m_Vector.end();
3434 template<
typename KeyT,
typename ValueT>
3435 void VmaMap<KeyT, ValueT>::erase(iterator it)
3437 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3440 #endif // #if VMA_USE_STL_UNORDERED_MAP 3446 class VmaDeviceMemoryBlock;
3448 struct VmaAllocation_T
3451 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3455 FLAG_USER_DATA_STRING = 0x01,
3459 enum ALLOCATION_TYPE
3461 ALLOCATION_TYPE_NONE,
3462 ALLOCATION_TYPE_BLOCK,
3463 ALLOCATION_TYPE_DEDICATED,
3466 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3469 m_pUserData(VMA_NULL),
3470 m_LastUseFrameIndex(currentFrameIndex),
3471 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3472 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3474 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3480 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3483 VMA_ASSERT(m_pUserData == VMA_NULL);
3486 void InitBlockAllocation(
3488 VmaDeviceMemoryBlock* block,
3489 VkDeviceSize offset,
3490 VkDeviceSize alignment,
3492 VmaSuballocationType suballocationType,
3496 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3497 VMA_ASSERT(block != VMA_NULL);
3498 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3499 m_Alignment = alignment;
3501 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3502 m_SuballocationType = (uint8_t)suballocationType;
3503 m_BlockAllocation.m_hPool = hPool;
3504 m_BlockAllocation.m_Block = block;
3505 m_BlockAllocation.m_Offset = offset;
3506 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3511 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3512 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3513 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3514 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3515 m_BlockAllocation.m_Block = VMA_NULL;
3516 m_BlockAllocation.m_Offset = 0;
3517 m_BlockAllocation.m_CanBecomeLost =
true;
3520 void ChangeBlockAllocation(
3521 VmaAllocator hAllocator,
3522 VmaDeviceMemoryBlock* block,
3523 VkDeviceSize offset);
3526 void InitDedicatedAllocation(
3527 uint32_t memoryTypeIndex,
3528 VkDeviceMemory hMemory,
3529 VmaSuballocationType suballocationType,
3533 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3534 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3535 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3538 m_SuballocationType = (uint8_t)suballocationType;
3539 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3540 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3541 m_DedicatedAllocation.m_hMemory = hMemory;
3542 m_DedicatedAllocation.m_pMappedData = pMappedData;
3545 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3546 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3547 VkDeviceSize GetSize()
const {
return m_Size; }
3548 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3549 void* GetUserData()
const {
return m_pUserData; }
3550 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3551 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3553 VmaDeviceMemoryBlock* GetBlock()
const 3555 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3556 return m_BlockAllocation.m_Block;
3558 VkDeviceSize GetOffset()
const;
3559 VkDeviceMemory GetMemory()
const;
3560 uint32_t GetMemoryTypeIndex()
const;
3561 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3562 void* GetMappedData()
const;
3563 bool CanBecomeLost()
const;
3564 VmaPool GetPool()
const;
3566 uint32_t GetLastUseFrameIndex()
const 3568 return m_LastUseFrameIndex.load();
3570 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3572 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3582 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3584 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3586 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3597 void BlockAllocMap();
3598 void BlockAllocUnmap();
3599 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3600 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3603 VkDeviceSize m_Alignment;
3604 VkDeviceSize m_Size;
3606 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3608 uint8_t m_SuballocationType;
3615 struct BlockAllocation
3618 VmaDeviceMemoryBlock* m_Block;
3619 VkDeviceSize m_Offset;
3620 bool m_CanBecomeLost;
3624 struct DedicatedAllocation
3626 uint32_t m_MemoryTypeIndex;
3627 VkDeviceMemory m_hMemory;
3628 void* m_pMappedData;
3634 BlockAllocation m_BlockAllocation;
3636 DedicatedAllocation m_DedicatedAllocation;
3639 void FreeUserDataString(VmaAllocator hAllocator);
3646 struct VmaSuballocation
3648 VkDeviceSize offset;
3650 VmaAllocation hAllocation;
3651 VmaSuballocationType type;
3654 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3657 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3672 struct VmaAllocationRequest
3674 VkDeviceSize offset;
3675 VkDeviceSize sumFreeSize;
3676 VkDeviceSize sumItemSize;
3677 VmaSuballocationList::iterator item;
3678 size_t itemsToMakeLostCount;
3680 VkDeviceSize CalcCost()
const 3682 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3690 class VmaBlockMetadata
3693 VmaBlockMetadata(VmaAllocator hAllocator);
3694 ~VmaBlockMetadata();
3695 void Init(VkDeviceSize size);
3698 bool Validate()
const;
3699 VkDeviceSize GetSize()
const {
return m_Size; }
3700 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3701 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3702 VkDeviceSize GetUnusedRangeSizeMax()
const;
3704 bool IsEmpty()
const;
3706 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3709 #if VMA_STATS_STRING_ENABLED 3710 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3714 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3719 bool CreateAllocationRequest(
3720 uint32_t currentFrameIndex,
3721 uint32_t frameInUseCount,
3722 VkDeviceSize bufferImageGranularity,
3723 VkDeviceSize allocSize,
3724 VkDeviceSize allocAlignment,
3725 VmaSuballocationType allocType,
3726 bool canMakeOtherLost,
3727 VmaAllocationRequest* pAllocationRequest);
3729 bool MakeRequestedAllocationsLost(
3730 uint32_t currentFrameIndex,
3731 uint32_t frameInUseCount,
3732 VmaAllocationRequest* pAllocationRequest);
3734 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3738 const VmaAllocationRequest& request,
3739 VmaSuballocationType type,
3740 VkDeviceSize allocSize,
3741 VmaAllocation hAllocation);
3744 void Free(
const VmaAllocation allocation);
3745 void FreeAtOffset(VkDeviceSize offset);
3748 VkDeviceSize m_Size;
3749 uint32_t m_FreeCount;
3750 VkDeviceSize m_SumFreeSize;
3751 VmaSuballocationList m_Suballocations;
3754 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3756 bool ValidateFreeSuballocationList()
const;
3760 bool CheckAllocation(
3761 uint32_t currentFrameIndex,
3762 uint32_t frameInUseCount,
3763 VkDeviceSize bufferImageGranularity,
3764 VkDeviceSize allocSize,
3765 VkDeviceSize allocAlignment,
3766 VmaSuballocationType allocType,
3767 VmaSuballocationList::const_iterator suballocItem,
3768 bool canMakeOtherLost,
3769 VkDeviceSize* pOffset,
3770 size_t* itemsToMakeLostCount,
3771 VkDeviceSize* pSumFreeSize,
3772 VkDeviceSize* pSumItemSize)
const;
3774 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3778 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3781 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3784 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3788 class VmaDeviceMemoryMapping
3791 VmaDeviceMemoryMapping();
3792 ~VmaDeviceMemoryMapping();
3794 void* GetMappedData()
const {
return m_pMappedData; }
3797 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3798 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3802 uint32_t m_MapCount;
3803 void* m_pMappedData;
3812 class VmaDeviceMemoryBlock
3815 uint32_t m_MemoryTypeIndex;
3816 VkDeviceMemory m_hMemory;
3817 VmaDeviceMemoryMapping m_Mapping;
3818 VmaBlockMetadata m_Metadata;
3820 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3822 ~VmaDeviceMemoryBlock()
3824 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3829 uint32_t newMemoryTypeIndex,
3830 VkDeviceMemory newMemory,
3831 VkDeviceSize newSize);
3833 void Destroy(VmaAllocator allocator);
3836 bool Validate()
const;
3839 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3840 void Unmap(VmaAllocator hAllocator, uint32_t count);
// Comparator ordering raw pointers by address, for sorted pointer vectors.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
3859 struct VmaBlockVector
3862 VmaAllocator hAllocator,
3863 uint32_t memoryTypeIndex,
3864 VkDeviceSize preferredBlockSize,
3865 size_t minBlockCount,
3866 size_t maxBlockCount,
3867 VkDeviceSize bufferImageGranularity,
3868 uint32_t frameInUseCount,
3872 VkResult CreateMinBlocks();
3874 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3875 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3876 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3877 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3881 bool IsEmpty()
const {
return m_Blocks.empty(); }
3884 VmaPool hCurrentPool,
3885 uint32_t currentFrameIndex,
3886 const VkMemoryRequirements& vkMemReq,
3888 VmaSuballocationType suballocType,
3889 VmaAllocation* pAllocation);
3892 VmaAllocation hAllocation);
3897 #if VMA_STATS_STRING_ENABLED 3898 void PrintDetailedMap(
class VmaJsonWriter& json);
3901 void MakePoolAllocationsLost(
3902 uint32_t currentFrameIndex,
3903 size_t* pLostAllocationCount);
3905 VmaDefragmentator* EnsureDefragmentator(
3906 VmaAllocator hAllocator,
3907 uint32_t currentFrameIndex);
3909 VkResult Defragment(
3911 VkDeviceSize& maxBytesToMove,
3912 uint32_t& maxAllocationsToMove);
3914 void DestroyDefragmentator();
3917 friend class VmaDefragmentator;
3919 const VmaAllocator m_hAllocator;
3920 const uint32_t m_MemoryTypeIndex;
3921 const VkDeviceSize m_PreferredBlockSize;
3922 const size_t m_MinBlockCount;
3923 const size_t m_MaxBlockCount;
3924 const VkDeviceSize m_BufferImageGranularity;
3925 const uint32_t m_FrameInUseCount;
3926 const bool m_IsCustomPool;
3929 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3933 bool m_HasEmptyBlock;
3934 VmaDefragmentator* m_pDefragmentator;
3936 size_t CalcMaxBlockSize()
const;
3939 void Remove(VmaDeviceMemoryBlock* pBlock);
3943 void IncrementallySortBlocks();
3945 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3951 VmaBlockVector m_BlockVector;
3955 VmaAllocator hAllocator,
3959 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3961 #if VMA_STATS_STRING_ENABLED 3966 class VmaDefragmentator
3968 const VmaAllocator m_hAllocator;
3969 VmaBlockVector*
const m_pBlockVector;
3970 uint32_t m_CurrentFrameIndex;
3971 VkDeviceSize m_BytesMoved;
3972 uint32_t m_AllocationsMoved;
3974 struct AllocationInfo
3976 VmaAllocation m_hAllocation;
3977 VkBool32* m_pChanged;
3980 m_hAllocation(VK_NULL_HANDLE),
3981 m_pChanged(VMA_NULL)
3986 struct AllocationInfoSizeGreater
3988 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3990 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3995 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3999 VmaDeviceMemoryBlock* m_pBlock;
4000 bool m_HasNonMovableAllocations;
4001 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4003 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4005 m_HasNonMovableAllocations(true),
4006 m_Allocations(pAllocationCallbacks),
4007 m_pMappedDataForDefragmentation(VMA_NULL)
4011 void CalcHasNonMovableAllocations()
4013 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4014 const size_t defragmentAllocCount = m_Allocations.size();
4015 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4018 void SortAllocationsBySizeDescecnding()
4020 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4023 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
4024 void Unmap(VmaAllocator hAllocator);
4028 void* m_pMappedDataForDefragmentation;
4031 struct BlockPointerLess
4033 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4035 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4037 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4039 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4045 struct BlockInfoCompareMoveDestination
4047 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4049 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4053 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4057 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4065 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4066 BlockInfoVector m_Blocks;
4068 VkResult DefragmentRound(
4069 VkDeviceSize maxBytesToMove,
4070 uint32_t maxAllocationsToMove);
4072 static bool MoveMakesSense(
4073 size_t dstBlockIndex, VkDeviceSize dstOffset,
4074 size_t srcBlockIndex, VkDeviceSize srcOffset);
4078 VmaAllocator hAllocator,
4079 VmaBlockVector* pBlockVector,
4080 uint32_t currentFrameIndex);
4082 ~VmaDefragmentator();
4084 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4085 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4087 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
4089 VkResult Defragment(
4090 VkDeviceSize maxBytesToMove,
4091 uint32_t maxAllocationsToMove);
4095 struct VmaAllocator_T
4098 bool m_UseKhrDedicatedAllocation;
4100 bool m_AllocationCallbacksSpecified;
4101 VkAllocationCallbacks m_AllocationCallbacks;
4105 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4106 VMA_MUTEX m_HeapSizeLimitMutex;
4108 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4109 VkPhysicalDeviceMemoryProperties m_MemProps;
4112 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4115 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4116 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4117 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4122 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4124 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4128 return m_VulkanFunctions;
4131 VkDeviceSize GetBufferImageGranularity()
const 4134 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4135 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4138 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4139 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4141 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4143 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4144 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4147 void GetBufferMemoryRequirements(
4149 VkMemoryRequirements& memReq,
4150 bool& requiresDedicatedAllocation,
4151 bool& prefersDedicatedAllocation)
const;
4152 void GetImageMemoryRequirements(
4154 VkMemoryRequirements& memReq,
4155 bool& requiresDedicatedAllocation,
4156 bool& prefersDedicatedAllocation)
const;
4159 VkResult AllocateMemory(
4160 const VkMemoryRequirements& vkMemReq,
4161 bool requiresDedicatedAllocation,
4162 bool prefersDedicatedAllocation,
4163 VkBuffer dedicatedBuffer,
4164 VkImage dedicatedImage,
4166 VmaSuballocationType suballocType,
4167 VmaAllocation* pAllocation);
4170 void FreeMemory(
const VmaAllocation allocation);
4172 void CalculateStats(
VmaStats* pStats);
4174 #if VMA_STATS_STRING_ENABLED 4175 void PrintDetailedMap(
class VmaJsonWriter& json);
4178 VkResult Defragment(
4179 VmaAllocation* pAllocations,
4180 size_t allocationCount,
4181 VkBool32* pAllocationsChanged,
4185 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4186 bool TouchAllocation(VmaAllocation hAllocation);
4189 void DestroyPool(VmaPool pool);
4190 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4192 void SetCurrentFrameIndex(uint32_t frameIndex);
4194 void MakePoolAllocationsLost(
4196 size_t* pLostAllocationCount);
4198 void CreateLostAllocation(VmaAllocation* pAllocation);
4200 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4201 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4203 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4204 void Unmap(VmaAllocation hAllocation);
4207 VkDeviceSize m_PreferredLargeHeapBlockSize;
4209 VkPhysicalDevice m_PhysicalDevice;
4210 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4212 VMA_MUTEX m_PoolsMutex;
4214 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4220 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4222 VkResult AllocateMemoryOfType(
4223 const VkMemoryRequirements& vkMemReq,
4224 bool dedicatedAllocation,
4225 VkBuffer dedicatedBuffer,
4226 VkImage dedicatedImage,
4228 uint32_t memTypeIndex,
4229 VmaSuballocationType suballocType,
4230 VmaAllocation* pAllocation);
4233 VkResult AllocateDedicatedMemory(
4235 VmaSuballocationType suballocType,
4236 uint32_t memTypeIndex,
4238 bool isUserDataString,
4240 VkBuffer dedicatedBuffer,
4241 VkImage dedicatedImage,
4242 VmaAllocation* pAllocation);
4245 void FreeDedicatedMemory(VmaAllocation allocation);
4251 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4253 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4256 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4258 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4261 template<
typename T>
4262 static T* VmaAllocate(VmaAllocator hAllocator)
4264 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4267 template<
typename T>
4268 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4270 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4273 template<
typename T>
4274 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4279 VmaFree(hAllocator, ptr);
4283 template<
typename T>
4284 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4288 for(
size_t i = count; i--; )
4290 VmaFree(hAllocator, ptr);
4297 #if VMA_STATS_STRING_ENABLED 4299 class VmaStringBuilder
4302 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4303 size_t GetLength()
const {
return m_Data.size(); }
4304 const char* GetData()
const {
return m_Data.data(); }
4306 void Add(
char ch) { m_Data.push_back(ch); }
4307 void Add(
const char* pStr);
4308 void AddNewLine() { Add(
'\n'); }
4309 void AddNumber(uint32_t num);
4310 void AddNumber(uint64_t num);
4311 void AddPointer(
const void* ptr);
4314 VmaVector< char, VmaStlAllocator<char> > m_Data;
4317 void VmaStringBuilder::Add(
const char* pStr)
4319 const size_t strLen = strlen(pStr);
4322 const size_t oldCount = m_Data.size();
4323 m_Data.resize(oldCount + strLen);
4324 memcpy(m_Data.data() + oldCount, pStr, strLen);
4328 void VmaStringBuilder::AddNumber(uint32_t num)
4331 VmaUint32ToStr(buf,
sizeof(buf), num);
4335 void VmaStringBuilder::AddNumber(uint64_t num)
4338 VmaUint64ToStr(buf,
sizeof(buf), num);
4342 void VmaStringBuilder::AddPointer(
const void* ptr)
4345 VmaPtrToStr(buf,
sizeof(buf), ptr);
4349 #endif // #if VMA_STATS_STRING_ENABLED 4354 #if VMA_STATS_STRING_ENABLED 4359 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4362 void BeginObject(
bool singleLine =
false);
4365 void BeginArray(
bool singleLine =
false);
4368 void WriteString(
const char* pStr);
4369 void BeginString(
const char* pStr = VMA_NULL);
4370 void ContinueString(
const char* pStr);
4371 void ContinueString(uint32_t n);
4372 void ContinueString(uint64_t n);
4373 void ContinueString_Pointer(
const void* ptr);
4374 void EndString(
const char* pStr = VMA_NULL);
4376 void WriteNumber(uint32_t n);
4377 void WriteNumber(uint64_t n);
4378 void WriteBool(
bool b);
4382 static const char*
const INDENT;
4384 enum COLLECTION_TYPE
4386 COLLECTION_TYPE_OBJECT,
4387 COLLECTION_TYPE_ARRAY,
4391 COLLECTION_TYPE type;
4392 uint32_t valueCount;
4393 bool singleLineMode;
4396 VmaStringBuilder& m_SB;
4397 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4398 bool m_InsideString;
4400 void BeginValue(
bool isString);
4401 void WriteIndent(
bool oneLess =
false);
4404 const char*
const VmaJsonWriter::INDENT =
" ";
4406 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4408 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4409 m_InsideString(false)
4413 VmaJsonWriter::~VmaJsonWriter()
4415 VMA_ASSERT(!m_InsideString);
4416 VMA_ASSERT(m_Stack.empty());
4419 void VmaJsonWriter::BeginObject(
bool singleLine)
4421 VMA_ASSERT(!m_InsideString);
4427 item.type = COLLECTION_TYPE_OBJECT;
4428 item.valueCount = 0;
4429 item.singleLineMode = singleLine;
4430 m_Stack.push_back(item);
4433 void VmaJsonWriter::EndObject()
4435 VMA_ASSERT(!m_InsideString);
4440 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4444 void VmaJsonWriter::BeginArray(
bool singleLine)
4446 VMA_ASSERT(!m_InsideString);
4452 item.type = COLLECTION_TYPE_ARRAY;
4453 item.valueCount = 0;
4454 item.singleLineMode = singleLine;
4455 m_Stack.push_back(item);
4458 void VmaJsonWriter::EndArray()
4460 VMA_ASSERT(!m_InsideString);
4465 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4469 void VmaJsonWriter::WriteString(
const char* pStr)
4475 void VmaJsonWriter::BeginString(
const char* pStr)
4477 VMA_ASSERT(!m_InsideString);
4481 m_InsideString =
true;
4482 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4484 ContinueString(pStr);
4488 void VmaJsonWriter::ContinueString(
const char* pStr)
4490 VMA_ASSERT(m_InsideString);
4492 const size_t strLen = strlen(pStr);
4493 for(
size_t i = 0; i < strLen; ++i)
4526 VMA_ASSERT(0 &&
"Character not currently supported.");
4532 void VmaJsonWriter::ContinueString(uint32_t n)
4534 VMA_ASSERT(m_InsideString);
4538 void VmaJsonWriter::ContinueString(uint64_t n)
4540 VMA_ASSERT(m_InsideString);
4544 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4546 VMA_ASSERT(m_InsideString);
4547 m_SB.AddPointer(ptr);
4550 void VmaJsonWriter::EndString(
const char* pStr)
4552 VMA_ASSERT(m_InsideString);
4553 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4555 ContinueString(pStr);
4558 m_InsideString =
false;
4561 void VmaJsonWriter::WriteNumber(uint32_t n)
4563 VMA_ASSERT(!m_InsideString);
4568 void VmaJsonWriter::WriteNumber(uint64_t n)
4570 VMA_ASSERT(!m_InsideString);
4575 void VmaJsonWriter::WriteBool(
bool b)
4577 VMA_ASSERT(!m_InsideString);
4579 m_SB.Add(b ?
"true" :
"false");
4582 void VmaJsonWriter::WriteNull()
4584 VMA_ASSERT(!m_InsideString);
4589 void VmaJsonWriter::BeginValue(
bool isString)
4591 if(!m_Stack.empty())
4593 StackItem& currItem = m_Stack.back();
4594 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4595 currItem.valueCount % 2 == 0)
4597 VMA_ASSERT(isString);
4600 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4601 currItem.valueCount % 2 != 0)
4605 else if(currItem.valueCount > 0)
4614 ++currItem.valueCount;
4618 void VmaJsonWriter::WriteIndent(
bool oneLess)
4620 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4624 size_t count = m_Stack.size();
4625 if(count > 0 && oneLess)
4629 for(
size_t i = 0; i < count; ++i)
4636 #endif // #if VMA_STATS_STRING_ENABLED 4640 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4642 if(IsUserDataString())
4644 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4646 FreeUserDataString(hAllocator);
4648 if(pUserData != VMA_NULL)
4650 const char*
const newStrSrc = (
char*)pUserData;
4651 const size_t newStrLen = strlen(newStrSrc);
4652 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4653 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4654 m_pUserData = newStrDst;
4659 m_pUserData = pUserData;
4663 void VmaAllocation_T::ChangeBlockAllocation(
4664 VmaAllocator hAllocator,
4665 VmaDeviceMemoryBlock* block,
4666 VkDeviceSize offset)
4668 VMA_ASSERT(block != VMA_NULL);
4669 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4672 if(block != m_BlockAllocation.m_Block)
4674 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4675 if(IsPersistentMap())
4677 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4678 block->Map(hAllocator, mapRefCount, VMA_NULL);
4681 m_BlockAllocation.m_Block = block;
4682 m_BlockAllocation.m_Offset = offset;
4685 VkDeviceSize VmaAllocation_T::GetOffset()
const 4689 case ALLOCATION_TYPE_BLOCK:
4690 return m_BlockAllocation.m_Offset;
4691 case ALLOCATION_TYPE_DEDICATED:
4699 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4703 case ALLOCATION_TYPE_BLOCK:
4704 return m_BlockAllocation.m_Block->m_hMemory;
4705 case ALLOCATION_TYPE_DEDICATED:
4706 return m_DedicatedAllocation.m_hMemory;
4709 return VK_NULL_HANDLE;
4713 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4717 case ALLOCATION_TYPE_BLOCK:
4718 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4719 case ALLOCATION_TYPE_DEDICATED:
4720 return m_DedicatedAllocation.m_MemoryTypeIndex;
4727 void* VmaAllocation_T::GetMappedData()
const 4731 case ALLOCATION_TYPE_BLOCK:
4734 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4735 VMA_ASSERT(pBlockData != VMA_NULL);
4736 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4743 case ALLOCATION_TYPE_DEDICATED:
4744 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4745 return m_DedicatedAllocation.m_pMappedData;
4752 bool VmaAllocation_T::CanBecomeLost()
const 4756 case ALLOCATION_TYPE_BLOCK:
4757 return m_BlockAllocation.m_CanBecomeLost;
4758 case ALLOCATION_TYPE_DEDICATED:
4766 VmaPool VmaAllocation_T::GetPool()
const 4768 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4769 return m_BlockAllocation.m_hPool;
4772 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4774 VMA_ASSERT(CanBecomeLost());
4780 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4783 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4788 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4794 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4804 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4806 VMA_ASSERT(IsUserDataString());
4807 if(m_pUserData != VMA_NULL)
4809 char*
const oldStr = (
char*)m_pUserData;
4810 const size_t oldStrLen = strlen(oldStr);
4811 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4812 m_pUserData = VMA_NULL;
4816 void VmaAllocation_T::BlockAllocMap()
4818 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4820 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4826 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4830 void VmaAllocation_T::BlockAllocUnmap()
4832 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4834 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4840 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4844 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4846 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4850 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4852 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4853 *ppData = m_DedicatedAllocation.m_pMappedData;
4859 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4860 return VK_ERROR_MEMORY_MAP_FAILED;
4865 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4866 hAllocator->m_hDevice,
4867 m_DedicatedAllocation.m_hMemory,
4872 if(result == VK_SUCCESS)
4874 m_DedicatedAllocation.m_pMappedData = *ppData;
4881 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4883 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4885 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4890 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4891 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4892 hAllocator->m_hDevice,
4893 m_DedicatedAllocation.m_hMemory);
4898 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4902 #if VMA_STATS_STRING_ENABLED 4905 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4914 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4918 json.WriteString(
"Blocks");
4921 json.WriteString(
"Allocations");
4924 json.WriteString(
"UnusedRanges");
4927 json.WriteString(
"UsedBytes");
4930 json.WriteString(
"UnusedBytes");
4935 json.WriteString(
"AllocationSize");
4936 json.BeginObject(
true);
4937 json.WriteString(
"Min");
4939 json.WriteString(
"Avg");
4941 json.WriteString(
"Max");
4948 json.WriteString(
"UnusedRangeSize");
4949 json.BeginObject(
true);
4950 json.WriteString(
"Min");
4952 json.WriteString(
"Avg");
4954 json.WriteString(
"Max");
4962 #endif // #if VMA_STATS_STRING_ENABLED 4964 struct VmaSuballocationItemSizeLess
4967 const VmaSuballocationList::iterator lhs,
4968 const VmaSuballocationList::iterator rhs)
const 4970 return lhs->size < rhs->size;
4973 const VmaSuballocationList::iterator lhs,
4974 VkDeviceSize rhsSize)
const 4976 return lhs->size < rhsSize;
4983 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4987 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4988 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4992 VmaBlockMetadata::~VmaBlockMetadata()
4996 void VmaBlockMetadata::Init(VkDeviceSize size)
5000 m_SumFreeSize = size;
5002 VmaSuballocation suballoc = {};
5003 suballoc.offset = 0;
5004 suballoc.size = size;
5005 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5006 suballoc.hAllocation = VK_NULL_HANDLE;
5008 m_Suballocations.push_back(suballoc);
5009 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5011 m_FreeSuballocationsBySize.push_back(suballocItem);
5014 bool VmaBlockMetadata::Validate()
const 5016 if(m_Suballocations.empty())
5022 VkDeviceSize calculatedOffset = 0;
5024 uint32_t calculatedFreeCount = 0;
5026 VkDeviceSize calculatedSumFreeSize = 0;
5029 size_t freeSuballocationsToRegister = 0;
5031 bool prevFree =
false;
5033 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5034 suballocItem != m_Suballocations.cend();
5037 const VmaSuballocation& subAlloc = *suballocItem;
5040 if(subAlloc.offset != calculatedOffset)
5045 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5047 if(prevFree && currFree)
5052 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5059 calculatedSumFreeSize += subAlloc.size;
5060 ++calculatedFreeCount;
5061 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5063 ++freeSuballocationsToRegister;
5068 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5072 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5078 calculatedOffset += subAlloc.size;
5079 prevFree = currFree;
5084 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5089 VkDeviceSize lastSize = 0;
5090 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5092 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5095 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5100 if(suballocItem->size < lastSize)
5105 lastSize = suballocItem->size;
5109 if(!ValidateFreeSuballocationList() ||
5110 (calculatedOffset != m_Size) ||
5111 (calculatedSumFreeSize != m_SumFreeSize) ||
5112 (calculatedFreeCount != m_FreeCount))
5120 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5122 if(!m_FreeSuballocationsBySize.empty())
5124 return m_FreeSuballocationsBySize.back()->size;
5132 bool VmaBlockMetadata::IsEmpty()
const 5134 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Accumulates per-block statistics (allocation/unused-range counts and sizes)
// into outInfo by iterating all suballocations.
// NOTE(review): most of this function's body was dropped by the extraction
// (initialization of outInfo and the per-suballocation accumulation); the
// remaining lines are left byte-identical.
5137 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5141 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5153 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5154 suballocItem != m_Suballocations.cend();
5157 const VmaSuballocation& suballoc = *suballocItem;
// Used suballocations contribute to allocation stats, free ones (else branch,
// not visible here) to unused-range stats.
5158 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into an aggregate VmaPoolStats structure.
// NOTE(review): the extraction dropped the remaining accumulation statements
// (unusedSize, allocationCount, unusedRangeCount, unusedRangeSizeMax -
// presumably; confirm against the full source). Code left byte-identical.
5171 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5173 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5175 inoutStats.
size += m_Size;
// Serializes this block's metadata and every suballocation as JSON via
// VmaJsonWriter, for vmaBuildStatsString's detailed map.
// NOTE(review): the extraction dropped braces, Begin/EndObject pairs and some
// statements; code left byte-identical.
5182 #if VMA_STATS_STRING_ENABLED 5184 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
// Top-level per-block summary fields.
const 5188 json.WriteString(
"TotalBytes");
5189 json.WriteNumber(m_Size);
5191 json.WriteString(
"UnusedBytes");
5192 json.WriteNumber(m_SumFreeSize);
// Used allocations = total suballocations minus free ones.
5194 json.WriteString(
"Allocations");
5195 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5197 json.WriteString(
"UnusedRanges");
5198 json.WriteNumber(m_FreeCount);
// One JSON object per suballocation follows.
5200 json.WriteString(
"Suballocations");
5203 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5204 suballocItem != m_Suballocations.cend();
5205 ++suballocItem, ++i)
5207 json.BeginObject(
true);
5209 json.WriteString(
"Type");
5210 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5212 json.WriteString(
"Size");
5213 json.WriteNumber(suballocItem->size);
5215 json.WriteString(
"Offset");
5216 json.WriteNumber(suballocItem->offset);
// Used suballocations additionally report their user data, either as a
// string or as a pointer value.
5218 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5220 const void* pUserData = suballocItem->hAllocation->GetUserData();
5221 if(pUserData != VMA_NULL)
5223 json.WriteString(
"UserData");
5224 if(suballocItem->hAllocation->IsUserDataString())
5226 json.WriteString((
const char*)pUserData);
5231 json.ContinueString_Pointer(pUserData);
5244 #endif // #if VMA_STATS_STRING_ENABLED 5256 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5258 VMA_ASSERT(IsEmpty());
5259 pAllocationRequest->offset = 0;
5260 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5261 pAllocationRequest->sumItemSize = 0;
5262 pAllocationRequest->item = m_Suballocations.begin();
5263 pAllocationRequest->itemsToMakeLostCount = 0;
// Searches this block for a place to put a new allocation of allocSize bytes
// with the given alignment/type. First tries registered free suballocations
// (binary search by size, then best-fit within granularity constraints); if
// canMakeOtherLost, additionally considers making existing lost-able
// allocations lost and picks the cheapest candidate by CalcCost().
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): the extraction dropped braces, several arguments of the
// CheckAllocation calls, loop increments and return statements; code left
// byte-identical.
5266 bool VmaBlockMetadata::CreateAllocationRequest(
5267 uint32_t currentFrameIndex,
5268 uint32_t frameInUseCount,
5269 VkDeviceSize bufferImageGranularity,
5270 VkDeviceSize allocSize,
5271 VkDeviceSize allocAlignment,
5272 VmaSuballocationType allocType,
5273 bool canMakeOtherLost,
5274 VmaAllocationRequest* pAllocationRequest)
5276 VMA_ASSERT(allocSize > 0);
5277 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5278 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5279 VMA_HEAVY_ASSERT(Validate());
// Early out: without making others lost, total free space must suffice.
5281 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5288 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5289 if(freeSuballocCount > 0)
// Best-fit path: binary-find the first free suballocation not smaller than
// the needed size, then scan upward.
5294 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5295 m_FreeSuballocationsBySize.data(),
5296 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5298 VmaSuballocationItemSizeLess());
5299 size_t index = it - m_FreeSuballocationsBySize.data();
5300 for(; index < freeSuballocCount; ++index)
5305 bufferImageGranularity,
5309 m_FreeSuballocationsBySize[index],
5311 &pAllocationRequest->offset,
5312 &pAllocationRequest->itemsToMakeLostCount,
5313 &pAllocationRequest->sumFreeSize,
5314 &pAllocationRequest->sumItemSize))
5316 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit path (not visible which strategy selects it): scan from the
// largest free suballocation downward.
5324 for(
size_t index = freeSuballocCount; index--; )
5329 bufferImageGranularity,
5333 m_FreeSuballocationsBySize[index],
5335 &pAllocationRequest->offset,
5336 &pAllocationRequest->itemsToMakeLostCount,
5337 &pAllocationRequest->sumFreeSize,
5338 &pAllocationRequest->sumItemSize))
5340 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Brute-force path: consider every suballocation that is free or whose
// allocation can become lost, and keep the cheapest viable request.
5347 if(canMakeOtherLost)
5351 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5352 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5354 VmaAllocationRequest tmpAllocRequest = {};
5355 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5356 suballocIt != m_Suballocations.end();
5359 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5360 suballocIt->hAllocation->CanBecomeLost())
5365 bufferImageGranularity,
5371 &tmpAllocRequest.offset,
5372 &tmpAllocRequest.itemsToMakeLostCount,
5373 &tmpAllocRequest.sumFreeSize,
5374 &tmpAllocRequest.sumItemSize))
5376 tmpAllocRequest.item = suballocIt;
5378 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5380 *pAllocationRequest = tmpAllocRequest;
// A request was found if its cost is not the VK_WHOLE_SIZE sentinel.
5386 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount,
// starting at pAllocationRequest->item, so the request's range becomes free.
// Returns true on success (false path not visible in this extract).
// NOTE(review): the extraction dropped braces, an else branch (returning false
// when MakeLost fails - presumably; confirm against the full source) and the
// final return; code left byte-identical.
5395 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5396 uint32_t currentFrameIndex,
5397 uint32_t frameInUseCount,
5398 VmaAllocationRequest* pAllocationRequest)
5400 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over suballocations that are already free.
5402 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5404 ++pAllocationRequest->item;
5406 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5407 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5408 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5409 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned iterator.
5411 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5412 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: the request now points at a free suballocation.
5420 VMA_HEAVY_ASSERT(Validate());
5421 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5422 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5427 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5429 uint32_t lostAllocationCount = 0;
5430 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5431 it != m_Suballocations.end();
5434 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5435 it->hAllocation->CanBecomeLost() &&
5436 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5438 it = FreeSuballocation(it);
5439 ++lostAllocationCount;
5442 return lostAllocationCount;
// Commits a previously computed allocation request: converts the free
// suballocation at request.item into a used one of allocSize bytes at
// request.offset, and re-inserts any leftover space before/after it as new
// free suballocations.
// NOTE(review): the extraction dropped braces, the `++next` advance before the
// paddingEnd insert, and the m_FreeCount/m_SumFreeSize bookkeeping following
// line 5498; code left byte-identical.
5445 void VmaBlockMetadata::Alloc(
5446 const VmaAllocationRequest& request,
5447 VmaSuballocationType type,
5448 VkDeviceSize allocSize,
5449 VmaAllocation hAllocation)
5451 VMA_ASSERT(request.item != m_Suballocations.end());
5452 VmaSuballocation& suballoc = *request.item;
5454 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// The request must fit inside the chosen free suballocation; the space before
// and after the aligned allocation becomes padding.
5456 VMA_ASSERT(request.offset >= suballoc.offset);
5457 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5458 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5459 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item is about to stop being free, so remove it from the by-size list
// before mutating it.
5463 UnregisterFreeSuballocation(request.item);
5465 suballoc.offset = request.offset;
5466 suballoc.size = allocSize;
5467 suballoc.type = type;
5468 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation after the item.
5473 VmaSuballocation paddingSuballoc = {};
5474 paddingSuballoc.offset = request.offset + allocSize;
5475 paddingSuballoc.size = paddingEnd;
5476 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5477 VmaSuballocationList::iterator next = request.item;
5479 const VmaSuballocationList::iterator paddingEndItem =
5480 m_Suballocations.insert(next, paddingSuballoc);
5481 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation before the item.
5487 VmaSuballocation paddingSuballoc = {};
5488 paddingSuballoc.offset = request.offset - paddingBegin;
5489 paddingSuballoc.size = paddingBegin;
5490 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5491 const VmaSuballocationList::iterator paddingBeginItem =
5492 m_Suballocations.insert(request.item, paddingSuballoc);
5493 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free range consumed, plus/minus the padding ranges.
5497 m_FreeCount = m_FreeCount - 1;
5498 if(paddingBegin > 0)
5506 m_SumFreeSize -= allocSize;
5509 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5511 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5512 suballocItem != m_Suballocations.end();
5515 VmaSuballocation& suballoc = *suballocItem;
5516 if(suballoc.hAllocation == allocation)
5518 FreeSuballocation(suballocItem);
5519 VMA_HEAVY_ASSERT(Validate());
5523 VMA_ASSERT(0 &&
"Not found!");
5526 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5528 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5529 suballocItem != m_Suballocations.end();
5532 VmaSuballocation& suballoc = *suballocItem;
5533 if(suballoc.offset == offset)
5535 FreeSuballocation(suballocItem);
5539 VMA_ASSERT(0 &&
"Not found!");
5542 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5544 VkDeviceSize lastSize = 0;
5545 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5547 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5549 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5554 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5559 if(it->size < lastSize)
5565 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem, honoring VMA_DEBUG_MARGIN and Vulkan's
// bufferImageGranularity. Outputs the chosen offset, how many following
// allocations would have to be made lost (canMakeOtherLost path), and the
// free/item size sums used for cost comparison. Returns true if placement is
// possible (the true/false returns themselves were dropped by the extraction).
// The function has two largely parallel halves: the canMakeOtherLost half may
// span multiple suballocations; the simple half requires a single free one.
// NOTE(review): many physical lines (braces, returns, iterator advances) are
// missing from this extract; code left byte-identical.
5570 bool VmaBlockMetadata::CheckAllocation(
5571 uint32_t currentFrameIndex,
5572 uint32_t frameInUseCount,
5573 VkDeviceSize bufferImageGranularity,
5574 VkDeviceSize allocSize,
5575 VkDeviceSize allocAlignment,
5576 VmaSuballocationType allocType,
5577 VmaSuballocationList::const_iterator suballocItem,
5578 bool canMakeOtherLost,
5579 VkDeviceSize* pOffset,
5580 size_t* itemsToMakeLostCount,
5581 VkDeviceSize* pSumFreeSize,
5582 VkDeviceSize* pSumItemSize)
const 5584 VMA_ASSERT(allocSize > 0);
5585 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5586 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5587 VMA_ASSERT(pOffset != VMA_NULL);
5589 *itemsToMakeLostCount = 0;
// ---- Half 1: placement may consume used suballocations by making them lost.
5593 if(canMakeOtherLost)
5595 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5597 *pSumFreeSize = suballocItem->size;
// A used starting suballocation is only viable if its allocation is stale
// enough to be made lost.
5601 if(suballocItem->hAllocation->CanBecomeLost() &&
5602 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5604 ++*itemsToMakeLostCount;
5605 *pSumItemSize = suballocItem->size;
// Remaining space in the whole block must suffice.
5614 if(m_Size - suballocItem->offset < allocSize)
5620 *pOffset = suballocItem->offset;
// Reserve a debug margin before the allocation (except at block start).
5623 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5625 *pOffset += VMA_DEBUG_MARGIN;
5629 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5630 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a previous suballocation of conflicting type shares the same
// granularity page, bump the offset up to the next granularity boundary.
5634 if(bufferImageGranularity > 1)
5636 bool bufferImageGranularityConflict =
false;
5637 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5638 while(prevSuballocItem != m_Suballocations.cbegin())
5641 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5642 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5644 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5646 bufferImageGranularityConflict =
true;
5654 if(bufferImageGranularityConflict)
5656 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past the starting suballocation entirely.
5662 if(*pOffset >= suballocItem->offset + suballocItem->size)
5668 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// An end margin is required unless the allocation reaches the block's end.
5671 VmaSuballocationList::const_iterator next = suballocItem;
5673 const VkDeviceSize requiredEndMargin =
5674 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5676 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5678 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over as many suballocations as the allocation spans, counting
// free space and lost-able allocations along the way.
5685 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5686 if(totalSize > suballocItem->size)
5688 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5689 while(remainingSize > 0)
5692 if(lastSuballocItem == m_Suballocations.cend())
5696 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5698 *pSumFreeSize += lastSuballocItem->size;
5702 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5703 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5704 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5706 ++*itemsToMakeLostCount;
5707 *pSumItemSize += lastSuballocItem->size;
5714 remainingSize = (lastSuballocItem->size < remainingSize) ?
5715 remainingSize - lastSuballocItem->size : 0;
// Following allocations on the same granularity page with a conflicting type
// must also be lost-able, otherwise this placement fails.
5721 if(bufferImageGranularity > 1)
5723 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5725 while(nextSuballocItem != m_Suballocations.cend())
5727 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5728 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5730 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5732 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5733 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5734 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5736 ++*itemsToMakeLostCount;
// ---- Half 2: simple case - the allocation must fit inside this one free
// suballocation (same margin/alignment/granularity logic as above).
5755 const VmaSuballocation& suballoc = *suballocItem;
5756 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5758 *pSumFreeSize = suballoc.size;
5761 if(suballoc.size < allocSize)
5767 *pOffset = suballoc.offset;
5770 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5772 *pOffset += VMA_DEBUG_MARGIN;
5776 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5777 *pOffset = VmaAlignUp(*pOffset, alignment);
5781 if(bufferImageGranularity > 1)
5783 bool bufferImageGranularityConflict =
false;
5784 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5785 while(prevSuballocItem != m_Suballocations.cbegin())
5788 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5789 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5791 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5793 bufferImageGranularityConflict =
true;
5801 if(bufferImageGranularityConflict)
5803 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5808 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5811 VmaSuballocationList::const_iterator next = suballocItem;
5813 const VkDeviceSize requiredEndMargin =
5814 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free range.
5817 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting next suballocation on the same granularity page makes this
// placement unusable in the simple case.
5824 if(bufferImageGranularity > 1)
5826 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5828 while(nextSuballocItem != m_Suballocations.cend())
5830 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5831 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5833 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5852 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5854 VMA_ASSERT(item != m_Suballocations.end());
5855 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5857 VmaSuballocationList::iterator nextItem = item;
5859 VMA_ASSERT(nextItem != m_Suballocations.end());
5860 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5862 item->size += nextItem->size;
5864 m_Suballocations.erase(nextItem);
5867 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5870 VmaSuballocation& suballoc = *suballocItem;
5871 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5872 suballoc.hAllocation = VK_NULL_HANDLE;
5876 m_SumFreeSize += suballoc.size;
5879 bool mergeWithNext =
false;
5880 bool mergeWithPrev =
false;
5882 VmaSuballocationList::iterator nextItem = suballocItem;
5884 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5886 mergeWithNext =
true;
5889 VmaSuballocationList::iterator prevItem = suballocItem;
5890 if(suballocItem != m_Suballocations.begin())
5893 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5895 mergeWithPrev =
true;
5901 UnregisterFreeSuballocation(nextItem);
5902 MergeFreeWithNext(suballocItem);
5907 UnregisterFreeSuballocation(prevItem);
5908 MergeFreeWithNext(prevItem);
5909 RegisterFreeSuballocation(prevItem);
5914 RegisterFreeSuballocation(suballocItem);
5915 return suballocItem;
5919 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5921 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5922 VMA_ASSERT(item->size > 0);
5926 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5928 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5930 if(m_FreeSuballocationsBySize.empty())
5932 m_FreeSuballocationsBySize.push_back(item);
5936 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5944 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5946 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5947 VMA_ASSERT(item->size > 0);
5951 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5953 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5955 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5956 m_FreeSuballocationsBySize.data(),
5957 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5959 VmaSuballocationItemSizeLess());
5960 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5961 index < m_FreeSuballocationsBySize.size();
5964 if(m_FreeSuballocationsBySize[index] == item)
5966 VmaVectorRemove(m_FreeSuballocationsBySize, index);
5969 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5971 VMA_ASSERT(0 &&
"Not found.");
5980 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5982 m_pMappedData(VMA_NULL)
// Destructor only verifies that every Map() was balanced by an Unmap() before
// the owning memory block is destroyed.
5986 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5988 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory: if already mapped, just bumps
// m_MapCount and returns the cached pointer; otherwise calls vkMapMemory and
// caches the result. Thread-safe via m_Mutex when the allocator uses mutexes.
// NOTE(review): the extraction dropped the `count == 0` early-out, the branch
// structure, the remaining vkMapMemory arguments, and the assignments of
// m_MapCount/m_pMappedData on first map; code left byte-identical.
5991 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
5998 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse the existing mapping.
6001 m_MapCount += count;
6002 VMA_ASSERT(m_pMappedData != VMA_NULL);
6003 if(ppData != VMA_NULL)
6005 *ppData = m_pMappedData;
// First map: perform the actual Vulkan call through the function pointers.
6011 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6012 hAllocator->m_hDevice,
6018 if(result == VK_SUCCESS)
6020 if(ppData != VMA_NULL)
6022 *ppData = m_pMappedData;
// Reference-counted unmap: decrements m_MapCount by `count` and calls
// vkUnmapMemory only when the count reaches zero. Asserts on unbalanced
// Map/Unmap usage.
// NOTE(review): the extraction dropped the `if(m_MapCount == 0)` check that
// guards the lines clearing m_pMappedData and calling vkUnmapMemory, plus the
// else branch structure; code left byte-identical.
6030 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
6037 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6038 if(m_MapCount >= count)
6040 m_MapCount -= count;
// Last reference gone: drop the cached pointer and really unmap.
6043 m_pMappedData = VMA_NULL;
6044 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
6049 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
6056 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
6057 m_MemoryTypeIndex(UINT32_MAX),
6058 m_hMemory(VK_NULL_HANDLE),
6059 m_Metadata(hAllocator)
6063 void VmaDeviceMemoryBlock::Init(
6064 uint32_t newMemoryTypeIndex,
6065 VkDeviceMemory newMemory,
6066 VkDeviceSize newSize)
6068 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6070 m_MemoryTypeIndex = newMemoryTypeIndex;
6071 m_hMemory = newMemory;
6073 m_Metadata.Init(newSize);
6076 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
6080 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6082 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6083 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6084 m_hMemory = VK_NULL_HANDLE;
6087 bool VmaDeviceMemoryBlock::Validate()
const 6089 if((m_hMemory == VK_NULL_HANDLE) ||
6090 (m_Metadata.GetSize() == 0))
6095 return m_Metadata.Validate();
6098 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
6100 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
6103 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
6105 m_Mapping.Unmap(hAllocator, m_hMemory, count);
// NOTE(review): the extraction dropped the header of the function containing
// this memset (presumably a static VmaInitStatInfo(VmaStatInfo& outInfo) -
// confirm against the full source); only the zero-initialization survives.
6110 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of accumulated statistics; its body is not visible in this
// extract.
6129 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the user's VmaPoolCreateInfo fields into the
// embedded VmaBlockVector (member initializer not fully visible here).
// NOTE(review): the extraction dropped the parameter list tail, several
// initializer arguments, and the constructor/destructor bodies; code left
// byte-identical.
6137 VmaPool_T::VmaPool_T(
6138 VmaAllocator hAllocator,
6142 createInfo.memoryTypeIndex,
6143 createInfo.blockSize,
6144 createInfo.minBlockCount,
6145 createInfo.maxBlockCount,
6147 createInfo.frameInUseCount,
// Destructor body not visible - presumably destroys the block vector's
// contents; confirm against the full source.
6152 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration and starts with no blocks,
// no empty-block flag, and no defragmentator.
// NOTE(review): an empty #if/#endif VMA_STATS_STRING_ENABLED region was fused
// onto the first line by the extraction; code left byte-identical.
6156 #if VMA_STATS_STRING_ENABLED 6158 #endif // #if VMA_STATS_STRING_ENABLED 6160 VmaBlockVector::VmaBlockVector(
6161 VmaAllocator hAllocator,
6162 uint32_t memoryTypeIndex,
6163 VkDeviceSize preferredBlockSize,
6164 size_t minBlockCount,
6165 size_t maxBlockCount,
6166 VkDeviceSize bufferImageGranularity,
6167 uint32_t frameInUseCount,
6168 bool isCustomPool) :
6169 m_hAllocator(hAllocator),
6170 m_MemoryTypeIndex(memoryTypeIndex),
6171 m_PreferredBlockSize(preferredBlockSize),
6172 m_MinBlockCount(minBlockCount),
6173 m_MaxBlockCount(maxBlockCount),
6174 m_BufferImageGranularity(bufferImageGranularity),
6175 m_FrameInUseCount(frameInUseCount),
6176 m_IsCustomPool(isCustomPool),
// The block vector allocates through the allocator's own callbacks.
6177 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6178 m_HasEmptyBlock(false),
6179 m_pDefragmentator(VMA_NULL)
6183 VmaBlockVector::~VmaBlockVector()
6185 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6187 for(
size_t i = m_Blocks.size(); i--; )
6189 m_Blocks[i]->Destroy(m_hAllocator);
6190 vma_delete(m_hAllocator, m_Blocks[i]);
6194 VkResult VmaBlockVector::CreateMinBlocks()
6196 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6198 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6199 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks under the vector's mutex.
// NOTE(review): the extraction dropped the lines that zero-initialize *pStats
// before accumulation (presumably size/unusedSize/counters - confirm against
// the full source); code left byte-identical.
6207 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6215 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6217 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6219 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6221 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block adds its totals into *pStats.
6222 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retries in VmaBlockVector::Allocate's canMakeOtherLost loop,
// protecting against livelock when allocations keep being resurrected.
6226 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three stages:
//   1. try every existing block without making other allocations lost;
//   2. create a new block (halving the preferred size up to 3 times for
//      non-custom pools, both preemptively and on allocation failure);
//   3. if allowed, repeatedly pick the cheapest "make others lost" request
//      across all blocks, up to VMA_ALLOCATION_TRY_COUNT attempts.
// NOTE(review): the extraction dropped braces, several parameters/arguments
// (createInfo, mapping flags, InitBlockAllocation argument lists), local
// flag computations (isUserDataString, canMakeOtherLost) and most success
// `return VK_SUCCESS;` statements; code left byte-identical.
6228 VkResult VmaBlockVector::Allocate(
6229 VmaPool hCurrentPool,
6230 uint32_t currentFrameIndex,
6231 const VkMemoryRequirements& vkMemReq,
6233 VmaSuballocationType suballocType,
6234 VmaAllocation* pAllocation)
6239 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Stage 1: search existing blocks for a lost-free placement.
6243 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6245 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6246 VMA_ASSERT(pCurrBlock);
6247 VmaAllocationRequest currRequest = {};
6248 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6251 m_BufferImageGranularity,
// canMakeOtherLost == false here, so nothing may be made lost.
6259 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations need the block mapped up front.
6263 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6264 if(res != VK_SUCCESS)
6271 if(pCurrBlock->m_Metadata.IsEmpty())
// This block is no longer empty once we allocate from it.
6273 m_HasEmptyBlock =
false;
6276 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6277 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6278 (*pAllocation)->InitBlockAllocation(
6287 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6288 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6289 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 2: create a new block if the limit allows it.
6294 const bool canCreateNewBlock =
6296 (m_Blocks.size() < m_MaxBlockCount);
6299 if(canCreateNewBlock)
6302 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6303 uint32_t newBlockSizeShift = 0;
6304 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic for default pools: start smaller than the preferred size while
// the halved size still exceeds existing blocks and twice the request.
6308 if(m_IsCustomPool ==
false)
6311 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6312 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6314 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6315 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6317 newBlockSize = smallerNewBlockSize;
6318 ++newBlockSizeShift;
6327 size_t newBlockIndex = 0;
6328 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved block sizes.
6330 if(m_IsCustomPool ==
false)
6332 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6334 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6335 if(smallerNewBlockSize >= vkMemReq.size)
6337 newBlockSize = smallerNewBlockSize;
6338 ++newBlockSizeShift;
6339 res = CreateBlock(newBlockSize, &newBlockIndex);
6348 if(res == VK_SUCCESS)
6350 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6351 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6355 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6356 if(res != VK_SUCCESS)
// A brand-new block is empty: the allocation goes at offset 0.
6363 VmaAllocationRequest allocRequest;
6364 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6365 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6366 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6367 (*pAllocation)->InitBlockAllocation(
6370 allocRequest.offset,
6376 VMA_HEAVY_ASSERT(pBlock->Validate());
6377 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6378 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 3: make other (lost-able) allocations lost to free up space.
6386 if(canMakeOtherLost)
6388 uint32_t tryIndex = 0;
6389 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6391 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6392 VmaAllocationRequest bestRequest = {};
6393 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find the globally cheapest request across all blocks.
6397 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6399 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6400 VMA_ASSERT(pCurrBlock);
6401 VmaAllocationRequest currRequest = {};
6402 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6405 m_BufferImageGranularity,
6412 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6413 if(pBestRequestBlock == VMA_NULL ||
6414 currRequestCost < bestRequestCost)
6416 pBestRequestBlock = pCurrBlock;
6417 bestRequest = currRequest;
6418 bestRequestCost = currRequestCost;
// Cost 0 means nothing would be lost - cannot do better, stop searching.
6420 if(bestRequestCost == 0)
6428 if(pBestRequestBlock != VMA_NULL)
6432 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6433 if(res != VK_SUCCESS)
// Making the allocations lost can fail (they may have been used since the
// request was computed) - then the loop retries from scratch.
6439 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6445 if(pBestRequestBlock->m_Metadata.IsEmpty())
6447 m_HasEmptyBlock =
false;
6450 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6451 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6452 (*pAllocation)->InitBlockAllocation(
6461 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6462 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6463 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Exhausted all retries - treat as too many objects rather than looping on.
6477 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6479 return VK_ERROR_TOO_MANY_OBJECTS;
6483 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block-suballocated allocation: unmaps if it was persistently
// mapped, frees its range in the owning block's metadata, and manages the
// "keep at most one empty block" policy. The actual Vulkan memory release of
// a surplus empty block happens outside the mutex.
// NOTE(review): the extraction dropped braces, the scope that bounds the
// mutex lock, and parts of the empty-block branch; code left byte-identical.
6486 void VmaBlockVector::Free(
6487 VmaAllocation hAllocation)
6489 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6493 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6495 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Balance the Map() done at allocation time for persistently-mapped memory.
6497 if(hAllocation->IsPersistentMap())
6499 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6502 pBlock->m_Metadata.Free(hAllocation);
6503 VMA_HEAVY_ASSERT(pBlock->Validate());
6505 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Keep at most one empty block alive as a cache; delete extras (but never
// drop below m_MinBlockCount).
6508 if(pBlock->m_Metadata.IsEmpty())
6511 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6513 pBlockToDelete = pBlock;
6519 m_HasEmptyBlock =
true;
// This block became non-empty? Then if another empty block exists at the
// back, it can be released.
6524 else if(m_HasEmptyBlock)
6526 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6527 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6529 pBlockToDelete = pLastBlock;
6530 m_Blocks.pop_back();
6531 m_HasEmptyBlock =
false;
6535 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory is deliberately done outside the lock.
6540 if(pBlockToDelete != VMA_NULL)
6542 VMA_DEBUG_LOG(
" Deleted empty allocation");
6543 pBlockToDelete->Destroy(m_hAllocator);
6544 vma_delete(m_hAllocator, pBlockToDelete);
6548 size_t VmaBlockVector::CalcMaxBlockSize()
const 6551 for(
size_t i = m_Blocks.size(); i--; )
6553 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6554 if(result >= m_PreferredBlockSize)
6562 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6564 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6566 if(m_Blocks[blockIndex] == pBlock)
6568 VmaVectorRemove(m_Blocks, blockIndex);
6575 void VmaBlockVector::IncrementallySortBlocks()
6578 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6580 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6582 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize bytes from this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports its index.
// NOTE(review): the extraction dropped the error-return after
// AllocateVulkanMemory, the leading Init() arguments, and the final success
// return; code left byte-identical.
6588 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6590 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6591 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6592 allocInfo.allocationSize = blockSize;
6593 VkDeviceMemory mem = VK_NULL_HANDLE;
6594 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// New block object is created with the allocator's own allocation callbacks.
6603 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6607 allocInfo.allocationSize);
6609 m_Blocks.push_back(pBlock);
6610 if(pNewBlockIndex != VMA_NULL)
6612 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON: configuration summary (custom pools
// print Min/Max/Cur block counts; default pools print PreferredBlockSize),
// then the detailed map of every block.
// NOTE(review): the extraction dropped braces, Begin/EndObject/Array pairs
// and the custom-pool/default-pool branch structure; code left byte-identical.
6618 #if VMA_STATS_STRING_ENABLED 6620 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6622 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: emit pool configuration.
6628 json.WriteString(
"MemoryTypeIndex");
6629 json.WriteNumber(m_MemoryTypeIndex);
6631 json.WriteString(
"BlockSize");
6632 json.WriteNumber(m_PreferredBlockSize);
6634 json.WriteString(
"BlockCount");
6635 json.BeginObject(
true);
6636 if(m_MinBlockCount > 0)
6638 json.WriteString(
"Min");
6639 json.WriteNumber((uint64_t)m_MinBlockCount);
6641 if(m_MaxBlockCount < SIZE_MAX)
6643 json.WriteString(
"Max");
6644 json.WriteNumber((uint64_t)m_MaxBlockCount);
6646 json.WriteString(
"Cur");
6647 json.WriteNumber((uint64_t)m_Blocks.size());
6650 if(m_FrameInUseCount > 0)
6652 json.WriteString(
"FrameInUseCount");
6653 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size is configuration.
6658 json.WriteString(
"PreferredBlockSize");
6659 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps.
6662 json.WriteString(
"Blocks");
6664 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6666 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector and returns it.
// NOTE(review): the extraction dropped the vma_new constructor argument lines
// (presumably hAllocator, this, currentFrameIndex - confirm against the full
// source); code left byte-identical.
6673 #endif // #if VMA_STATS_STRING_ENABLED 6675 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6676 VmaAllocator hAllocator,
6677 uint32_t currentFrameIndex)
6679 if(m_pDefragmentator == VMA_NULL)
6681 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6687 return m_pDefragmentator;
// Runs the defragmentator under the vector's mutex, accumulates its
// bytes/allocations-moved into the optional stats (clamped by the caller's
// budgets), then destroys blocks that became empty (respecting
// m_MinBlockCount) and recomputes m_HasEmptyBlock.
// NOTE(review): the extraction dropped braces, the pDefragmentationStats
// parameter line, stat accumulation/budget-subtraction lines and the final
// return; code left byte-identical.
6690 VkResult VmaBlockVector::Defragment(
6692 VkDeviceSize& maxBytesToMove,
6693 uint32_t& maxAllocationsToMove)
6695 if(m_pDefragmentator == VMA_NULL)
6700 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6703 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report what the defragmentator actually moved.
6706 if(pDefragmentationStats != VMA_NULL)
6708 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6709 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6712 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6713 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free blocks emptied by the defragmentation, newest-first.
6719 m_HasEmptyBlock =
false;
6720 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6722 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6723 if(pBlock->m_Metadata.IsEmpty())
6725 if(m_Blocks.size() > m_MinBlockCount)
6727 if(pDefragmentationStats != VMA_NULL)
6730 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6733 VmaVectorRemove(m_Blocks, blockIndex);
6734 pBlock->Destroy(m_hAllocator);
6735 vma_delete(m_hAllocator, pBlock);
// Cannot delete below the minimum: remember an empty block remains.
6739 m_HasEmptyBlock =
true;
6747 void VmaBlockVector::DestroyDefragmentator()
6749 if(m_pDefragmentator != VMA_NULL)
6751 vma_delete(m_hAllocator, m_pDefragmentator);
6752 m_pDefragmentator = VMA_NULL;
6756 void VmaBlockVector::MakePoolAllocationsLost(
6757 uint32_t currentFrameIndex,
6758 size_t* pLostAllocationCount)
6760 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6761 size_t lostAllocationCount = 0;
6762 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6764 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6766 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6768 if(pLostAllocationCount != VMA_NULL)
6770 *pLostAllocationCount = lostAllocationCount;
6774 void VmaBlockVector::AddStats(
VmaStats* pStats)
6776 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6777 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6779 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6781 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6783 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6785 VMA_HEAVY_ASSERT(pBlock->Validate());
6787 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6788 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6789 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6790 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6797 VmaDefragmentator::VmaDefragmentator(
6798 VmaAllocator hAllocator,
6799 VmaBlockVector* pBlockVector,
6800 uint32_t currentFrameIndex) :
6801 m_hAllocator(hAllocator),
6802 m_pBlockVector(pBlockVector),
6803 m_CurrentFrameIndex(currentFrameIndex),
6805 m_AllocationsMoved(0),
6806 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6807 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6811 VmaDefragmentator::~VmaDefragmentator()
6813 for(
size_t i = m_Blocks.size(); i--; )
6815 vma_delete(m_hAllocator, m_Blocks[i]);
6819 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6821 AllocationInfo allocInfo;
6822 allocInfo.m_hAllocation = hAlloc;
6823 allocInfo.m_pChanged = pChanged;
6824 m_Allocations.push_back(allocInfo);
6827 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6830 if(m_pMappedDataForDefragmentation)
6832 *ppMappedData = m_pMappedDataForDefragmentation;
6837 if(m_pBlock->m_Mapping.GetMappedData())
6839 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6844 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6845 *ppMappedData = m_pMappedDataForDefragmentation;
6849 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6851 if(m_pMappedDataForDefragmentation != VMA_NULL)
6853 m_pBlock->Unmap(hAllocator, 1);
// NOTE(review): garbled extraction — braces, goto labels, else branches and
// several statements (the memcpy call head before 6944, loop-advance code
// after 6961) are missing, and original line numbers are fused in. Code left
// byte-identical; comments only. Restore from upstream before compiling.
//
// One round of defragmentation: walks allocations from the last block
// backwards and tries to re-place each into an earlier block, memcpy-ing the
// contents through mapped pointers, until the byte/count budgets are hit
// (then returns VK_INCOMPLETE).
6857 VkResult VmaDefragmentator::DefragmentRound(
6858 VkDeviceSize maxBytesToMove,
6859 uint32_t maxAllocationsToMove)
6861 if(m_Blocks.empty())
// Start from the last allocation of the last block.
6866 size_t srcBlockIndex = m_Blocks.size() - 1;
6867 size_t srcAllocIndex = SIZE_MAX;
// Find the next source allocation, stepping to earlier blocks as needed.
6873 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6875 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6878 if(srcBlockIndex == 0)
6885 srcAllocIndex = SIZE_MAX;
6890 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6894 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6895 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Cache the source allocation's placement parameters.
6897 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6898 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6899 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6900 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
6903 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6905 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6906 VmaAllocationRequest dstAllocRequest;
6907 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6908 m_CurrentFrameIndex,
6909 m_pBlockVector->GetFrameInUseCount(),
6910 m_pBlockVector->GetBufferImageGranularity(),
6915 &dstAllocRequest) &&
// MoveMakesSense call head is among the dropped lines here.
6917 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6919 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop when either budget would be exceeded by this move.
6922 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6923 (m_BytesMoved + size > maxBytesToMove))
6925 return VK_INCOMPLETE;
// Map both blocks for the CPU-side copy.
6928 void* pDstMappedData = VMA_NULL;
6929 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6930 if(res != VK_SUCCESS)
6935 void* pSrcMappedData = VMA_NULL;
6936 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6937 if(res != VK_SUCCESS)
// memcpy(dst, src, size) — the call head was dropped by extraction.
6944 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6945 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6946 static_cast<size_t>(size));
// Commit the move in metadata and retarget the allocation object.
6948 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6949 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6951 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6953 if(allocInfo.m_pChanged != VMA_NULL)
6955 *allocInfo.m_pChanged = VK_TRUE;
6958 ++m_AllocationsMoved;
6959 m_BytesMoved += size;
6961 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block (dropped lines).
6969 if(srcAllocIndex > 0)
6975 if(srcBlockIndex > 0)
6978 srcAllocIndex = SIZE_MAX;
// NOTE(review): garbled extraction — braces, the early `return VK_SUCCESS`,
// an else-branch freeing AllocationInfo for lost allocations, and the final
// return are missing; original line numbers are fused in. Code left
// byte-identical; comments only.
//
// Top-level defragmentation: builds per-block bookkeeping, distributes the
// registered allocations to their blocks, sorts blocks/allocations, runs up
// to two DefragmentRound passes, then unmaps anything mapped along the way.
6988 VkResult VmaDefragmentator::Defragment(
6989 VkDeviceSize maxBytesToMove,
6990 uint32_t maxAllocationsToMove)
6992 if(m_Allocations.empty())
// Create one BlockInfo per device memory block, in block-vector order.
6998 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6999 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7001 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7002 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7003 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be binary-searched to blocks.
7007 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered (non-lost) allocation into its block's list.
7010 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7012 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7014 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7016 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7017 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7018 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7020 (*it)->m_Allocations.push_back(allocInfo);
7028 m_Allocations.clear();
// Precompute per-block ordering data.
7030 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7032 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7033 pBlockInfo->CalcHasNonMovableAllocations();
// (sic) "Descecnding" is the original method name's spelling.
7034 pBlockInfo->SortAllocationsBySizeDescecnding();
// Best move destinations first.
7038 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds: second pass catches moves enabled by the first.
7041 VkResult result = VK_SUCCESS;
7042 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7044 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Unmap any blocks mapped by EnsureMapping during the rounds.
7048 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7050 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7056 bool VmaDefragmentator::MoveMakesSense(
7057 size_t dstBlockIndex, VkDeviceSize dstOffset,
7058 size_t srcBlockIndex, VkDeviceSize srcOffset)
7060 if(dstBlockIndex < srcBlockIndex)
7064 if(dstBlockIndex > srcBlockIndex)
7068 if(dstOffset < srcOffset)
// NOTE(review): garbled extraction — the constructor's opening line
// (signature taking const VmaAllocatorCreateInfo*) and several initializers
// (mutex-use flag, KHR-dedicated flag) were dropped, braces are missing, and
// original line numbers are fused in. Code left byte-identical; comments
// only. Restore from upstream before compiling.
//
// Allocator construction: caches device/callbacks from pCreateInfo, zeroes
// all tables, imports Vulkan function pointers, queries device properties,
// applies optional per-heap size limits, then creates one default block
// vector and one dedicated-allocation list per memory type.
7081 m_hDevice(pCreateInfo->device),
7082 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7083 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7084 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7085 m_PreferredLargeHeapBlockSize(0),
7086 m_PhysicalDevice(pCreateInfo->physicalDevice),
7087 m_CurrentFrameIndex(0),
7088 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all state tables before they are selectively filled in below.
7092 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7093 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7094 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7096 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7097 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no limit on any heap.
7099 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7101 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query physical-device and memory properties through imported pointers.
7112 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7113 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided per-heap limits, also clamping reported heap sizes.
7120 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7122 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7123 if(limit != VK_WHOLE_SIZE)
7125 m_HeapSizeLimit[heapIndex] = limit;
7126 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7128 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Per-memory-type default block vector + dedicated-allocation registry.
7134 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7136 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7138 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7144 GetBufferImageGranularity(),
7149 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7153 VmaAllocator_T::~VmaAllocator_T()
7155 VMA_ASSERT(m_Pools.empty());
7157 for(
size_t i = GetMemoryTypeCount(); i--; )
7159 vma_delete(
this, m_pDedicatedAllocations[i]);
7160 vma_delete(
this, m_pBlockVectors[i]);
// NOTE(review): garbled extraction — braces are missing and the `#if`,
// `#endif`, `#define VMA_COPY_IF_NOT_NULL` (with its line continuation) and
// `#undef` directives are fused onto shared lines with code; original line
// numbers are embedded in the text. Code left byte-identical; comments only.
// Restore from upstream before compiling.
//
// Fills m_VulkanFunctions: statically linked entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1, including vkGetDeviceProcAddr lookups
// for the KHR dedicated-allocation pair), then overrides with any non-null
// pointers the user supplied, and finally asserts completeness.
7164 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7166 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7167 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7168 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7169 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7170 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7171 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7172 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7173 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7174 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7175 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7176 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7177 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7178 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7179 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7180 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR dedicated-allocation entry points are device-level extensions and
// must be fetched via vkGetDeviceProcAddr.
7181 if(m_UseKhrDedicatedAllocation)
7183 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7184 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7185 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7186 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers override the static ones, field by field.
7188 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7190 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7191 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7193 if(pVulkanFunctions != VMA_NULL)
7195 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7196 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7197 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7198 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7199 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7200 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7201 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7202 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7203 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7204 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7205 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7206 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7207 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7208 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7209 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7210 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Every required entry point must now be resolved.
7213 #undef VMA_COPY_IF_NOT_NULL 7217 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7218 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7219 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7220 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7221 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7222 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7223 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7224 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7225 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7226 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7227 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7228 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7229 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7230 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7231 if(m_UseKhrDedicatedAllocation)
7233 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7234 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7238 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7240 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7241 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7242 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7243 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// NOTE(review): garbled extraction — braces and many interior lines are
// missing (the createInfo parameter, finalCreateInfo setup, the argument
// lists of both AllocateDedicatedMemory calls and blockVector->Allocate,
// and several returns) and original line numbers are fused in. Code left
// byte-identical; comments only. Restore from upstream before compiling.
//
// Allocates memory of one specific memory type: prefers a dedicated
// VkDeviceMemory for large/explicitly-dedicated requests, otherwise
// sub-allocates from the type's default block vector, falling back to a
// dedicated allocation if the block vector cannot satisfy the request.
7246 VkResult VmaAllocator_T::AllocateMemoryOfType(
7247 const VkMemoryRequirements& vkMemReq,
7248 bool dedicatedAllocation,
7249 VkBuffer dedicatedBuffer,
7250 VkImage dedicatedImage,
7252 uint32_t memTypeIndex,
7253 VmaSuballocationType suballocType,
7254 VmaAllocation* pAllocation)
7256 VMA_ASSERT(pAllocation != VMA_NULL);
7257 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping is only meaningful on HOST_VISIBLE memory (condition head dropped).
7263 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7268 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7269 VMA_ASSERT(blockVector);
// Heuristic: requests larger than half the preferred block size (or
// explicitly dedicated ones) go straight to a dedicated allocation.
7271 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7272 bool preferDedicatedMemory =
7273 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7274 dedicatedAllocation ||
7276 vkMemReq.size > preferredBlockSize / 2;
7278 if(preferDedicatedMemory &&
7280 finalCreateInfo.
pool == VK_NULL_HANDLE)
7289 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7293 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the default block vector.
7307 VkResult res = blockVector->Allocate(
7309 m_CurrentFrameIndex.load(),
7314 if(res == VK_SUCCESS)
7322 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: the block vector failed — try a dedicated allocation.
7326 res = AllocateDedicatedMemory(
7332 finalCreateInfo.pUserData,
7336 if(res == VK_SUCCESS)
7339 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7345 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// NOTE(review): garbled extraction — braces and several lines are missing
// (the size/map/pUserData parameters, the vkMapMemory argument list after
// 7397, early returns) and original line numbers are fused in. Code left
// byte-identical; comments only. Restore from upstream before compiling.
//
// Allocates one dedicated VkDeviceMemory (optionally chained with
// VkMemoryDedicatedAllocateInfoKHR for a specific buffer/image), optionally
// maps it, wraps it in a VmaAllocation_T, and registers it in the sorted
// per-type dedicated-allocation list.
7352 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7354 VmaSuballocationType suballocType,
7355 uint32_t memTypeIndex,
7357 bool isUserDataString,
7359 VkBuffer dedicatedBuffer,
7360 VkImage dedicatedImage,
7361 VmaAllocation* pAllocation)
7363 VMA_ASSERT(pAllocation);
7365 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7366 allocInfo.memoryTypeIndex = memTypeIndex;
7367 allocInfo.allocationSize = size;
// Chain dedicated-allocation info when the KHR extension is in use.
7369 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7370 if(m_UseKhrDedicatedAllocation)
7372 if(dedicatedBuffer != VK_NULL_HANDLE)
7374 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7375 dedicatedAllocInfo.buffer = dedicatedBuffer;
7376 allocInfo.pNext = &dedicatedAllocInfo;
7378 else if(dedicatedImage != VK_NULL_HANDLE)
7380 dedicatedAllocInfo.image = dedicatedImage;
7381 allocInfo.pNext = &dedicatedAllocInfo;
// Heap-limit-aware allocation through AllocateVulkanMemory.
7386 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7387 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7390 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping; on failure the memory is released again.
7394 void* pMappedData = VMA_NULL;
7397 res = (*m_VulkanFunctions.vkMapMemory)(
7406 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7407 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in an allocation object and register it.
7412 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7413 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7414 (*pAllocation)->SetUserData(
this, pUserData);
7418 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7419 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7420 VMA_ASSERT(pDedicatedAllocations);
7421 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7424 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7429 void VmaAllocator_T::GetBufferMemoryRequirements(
7431 VkMemoryRequirements& memReq,
7432 bool& requiresDedicatedAllocation,
7433 bool& prefersDedicatedAllocation)
const 7435 if(m_UseKhrDedicatedAllocation)
7437 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7438 memReqInfo.buffer = hBuffer;
7440 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7442 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7443 memReq2.pNext = &memDedicatedReq;
7445 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7447 memReq = memReq2.memoryRequirements;
7448 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7449 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7453 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7454 requiresDedicatedAllocation =
false;
7455 prefersDedicatedAllocation =
false;
7459 void VmaAllocator_T::GetImageMemoryRequirements(
7461 VkMemoryRequirements& memReq,
7462 bool& requiresDedicatedAllocation,
7463 bool& prefersDedicatedAllocation)
const 7465 if(m_UseKhrDedicatedAllocation)
7467 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7468 memReqInfo.image = hImage;
7470 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7472 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7473 memReq2.pNext = &memDedicatedReq;
7475 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7477 memReq = memReq2.memoryRequirements;
7478 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7479 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7483 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7484 requiresDedicatedAllocation =
false;
7485 prefersDedicatedAllocation =
false;
// NOTE(review): garbled extraction — braces and many lines are missing
// (the createInfo parameter, several condition heads, finalCreateInfo
// setup, vmaFindMemoryTypeIndex calls that produce `res`, argument lists of
// both AllocateMemoryOfType calls and pool->m_BlockVector.Allocate) and
// original line numbers are fused in. Code left byte-identical; comments
// only. Restore from upstream before compiling.
//
// Top-level allocation entry: validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise tries
// memory types in fitness order, masking out each failed type and retrying.
7489 VkResult VmaAllocator_T::AllocateMemory(
7490 const VkMemoryRequirements& vkMemReq,
7491 bool requiresDedicatedAllocation,
7492 bool prefersDedicatedAllocation,
7493 VkBuffer dedicatedBuffer,
7494 VkImage dedicatedImage,
7496 VmaSuballocationType suballocType,
7497 VmaAllocation* pAllocation)
// Flag-combination validation (condition heads dropped by extraction).
7502 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7503 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7508 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7509 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7511 if(requiresDedicatedAllocation)
7515 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7516 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7518 if(createInfo.
pool != VK_NULL_HANDLE)
7520 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7521 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7524 if((createInfo.
pool != VK_NULL_HANDLE) &&
7527 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7528 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass the default block vectors entirely.
7531 if(createInfo.
pool != VK_NULL_HANDLE)
7533 return createInfo.
pool->m_BlockVector.Allocate(
7535 m_CurrentFrameIndex.load(),
// Non-pool path: try memory types in order of fitness; on failure, mask
// the failed type out of memoryTypeBits and pick the next candidate.
7544 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7545 uint32_t memTypeIndex = UINT32_MAX;
7547 if(res == VK_SUCCESS)
7549 res = AllocateMemoryOfType(
7551 requiresDedicatedAllocation || prefersDedicatedAllocation,
7559 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set.
7569 memoryTypeBits &= ~(1u << memTypeIndex);
7572 if(res == VK_SUCCESS)
7574 res = AllocateMemoryOfType(
7576 requiresDedicatedAllocation || prefersDedicatedAllocation,
7584 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
7594 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// NOTE(review): garbled extraction — braces, `break;` statements, the
// default case, and the else keyword between the pool and non-pool branches
// are missing; original line numbers are fused in. Code left byte-identical;
// comments only. Restore from upstream before compiling.
//
// Frees an allocation: block allocations return their range to the owning
// block vector (pool's or the default per-type one); dedicated allocations
// release their VkDeviceMemory. Already-lost allocations skip the free and
// only have their wrapper object destroyed.
7605 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7607 VMA_ASSERT(allocation);
// Lost allocations no longer own memory — nothing to return.
7609 if(allocation->CanBecomeLost() ==
false ||
7610 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7612 switch(allocation->GetType())
7614 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7616 VmaBlockVector* pBlockVector = VMA_NULL;
7617 VmaPool hPool = allocation->GetPool();
7618 if(hPool != VK_NULL_HANDLE)
7620 pBlockVector = &hPool->m_BlockVector;
// else: default block vector of the allocation's memory type.
7624 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7625 pBlockVector = m_pBlockVectors[memTypeIndex];
7627 pBlockVector->Free(allocation);
7630 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7631 FreeDedicatedMemory(allocation);
// Destroy the wrapper object regardless of allocation type.
7638 allocation->SetUserData(
this, VMA_NULL);
7639 vma_delete(
this, allocation);
// NOTE(review): garbled extraction — braces, the InitStatInfo calls inside
// the two zeroing loops, and the `VmaStatInfo allocationStatInfo;`
// declaration consumed at 7678 are missing; original line numbers are fused
// in. Code left byte-identical; comments only.
//
// Computes allocator-wide statistics: zeroes all StatInfo slots, then adds
// stats from default block vectors, custom pools, and dedicated
// allocations, and finally postprocesses totals and per-type/per-heap rows.
7642 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero-initialize every output slot (loop bodies dropped by extraction).
7645 InitStatInfo(pStats->
total);
7646 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7648 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7652 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7654 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7655 VMA_ASSERT(pBlockVector);
7656 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
7661 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7662 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7664 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, guarded per memory type.
7669 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7671 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7672 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7673 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7674 VMA_ASSERT(pDedicatedAllocVector);
7675 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7678 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7679 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7680 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7681 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/min/max once all raw sums are accumulated.
7686 VmaPostprocessCalcStatInfo(pStats->
total);
7687 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7688 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7689 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7690 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI-SIG vendor ID (matches VkPhysicalDeviceProperties::vendorID).
7693 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// NOTE(review): garbled extraction — braces and many lines are missing
// (the pDefragmentationInfo/pDefragmentationStats parameters, an else
// branch, the budget reads from pDefragmentationInfo, loop increments,
// intermediate Defragment() arguments, and the final return) and original
// line numbers are fused in. Code left byte-identical; comments only.
// Restore from upstream before compiling.
//
// Allocator-wide defragmentation: zeroes output arrays, registers each
// eligible allocation (block-type, HOST_VISIBLE, not lost) with its block
// vector's defragmentator, runs defragmentation per memory type and per
// pool under shared budgets, then destroys all defragmentators.
7695 VkResult VmaAllocator_T::Defragment(
7696 VmaAllocation* pAllocations,
7697 size_t allocationCount,
7698 VkBool32* pAllocationsChanged,
// Clear caller-visible outputs up front.
// NOTE(review): sizeof(*pAllocationsChanged) zeroes only ONE element of the
// caller's array, not allocationCount elements — looks suspicious; confirm
// against upstream before relying on pAllocationsChanged[1..n-1].
7702 if(pAllocationsChanged != VMA_NULL)
7704 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7706 if(pDefragmentationStats != VMA_NULL)
7708 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7711 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7713 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7715 const size_t poolCount = m_Pools.size();
// Dispatch each allocation to the defragmentator of its block vector.
7718 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7720 VmaAllocation hAlloc = pAllocations[allocIndex];
7722 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only movable allocations: block-suballocated, HOST_VISIBLE, not lost.
7724 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7726 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7728 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7730 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7732 const VmaPool hAllocPool = hAlloc->GetPool();
7734 if(hAllocPool != VK_NULL_HANDLE)
7736 pAllocBlockVector = &hAllocPool->GetBlockVector();
// else: default block vector of the allocation's memory type.
7741 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7744 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7746 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7747 &pAllocationsChanged[allocIndex] : VMA_NULL;
7748 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Budgets default to "unlimited" unless pDefragmentationInfo narrows them.
7752 VkResult result = VK_SUCCESS;
7756 VkDeviceSize maxBytesToMove = SIZE_MAX;
7757 uint32_t maxAllocationsToMove = UINT32_MAX;
7758 if(pDefragmentationInfo != VMA_NULL)
// Defragment default block vectors of all HOST_VISIBLE memory types.
7765 for(uint32_t memTypeIndex = 0;
7766 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7770 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7772 result = m_pBlockVectors[memTypeIndex]->Defragment(
7773 pDefragmentationStats,
7775 maxAllocationsToMove);
// Then all custom pools, sharing the same remaining budgets.
7780 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7782 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7783 pDefragmentationStats,
7785 maxAllocationsToMove);
// Tear down all defragmentators, pools first, then default vectors.
7791 for(
size_t poolIndex = poolCount; poolIndex--; )
7793 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7797 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7799 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7801 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// NOTE(review): garbled extraction — braces, the retry loop head around
// 7842, some field assignments (memoryType/deviceMemory/pMappedData in the
// lost branch), and return statements are missing; original line numbers
// are fused in. Code left byte-identical; comments only.
//
// Fills *pAllocationInfo. For allocations that can become lost, this also
// "touches" them: it CAS-updates the last-use frame index to the current
// frame so the allocation is not reclaimed, retrying on contention; a lost
// allocation reports VK_NULL_HANDLE memory (assignments partially dropped).
7808 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7810 if(hAllocation->CanBecomeLost())
7816 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7817 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost — report an empty/lost allocation.
7820 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7824 pAllocationInfo->
offset = 0;
7825 pAllocationInfo->
size = hAllocation->GetSize();
7827 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame — report as-is, no CAS needed.
7830 else if(localLastUseFrameIndex == localCurrFrameIndex)
7832 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7833 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7834 pAllocationInfo->
offset = hAllocation->GetOffset();
7835 pAllocationInfo->
size = hAllocation->GetSize();
7837 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Touch: CAS the last-use frame forward; retry loop head was dropped.
7842 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7844 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: report the allocation's current placement directly.
7851 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7852 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7853 pAllocationInfo->
offset = hAllocation->GetOffset();
7854 pAllocationInfo->
size = hAllocation->GetSize();
7855 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7856 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// NOTE(review): garbled extraction — braces, the return statements of every
// branch, the retry-loop head, and the non-lost fallthrough are missing;
// original line numbers are fused in. Code left byte-identical; comments
// only.
//
// "Touches" an allocation: for can-become-lost allocations, CAS-advances
// the last-use frame index to the current frame (retrying on contention)
// and reports whether the allocation is still valid; returns (dropped
// lines) false when already lost, true otherwise.
7860 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
7863 if(hAllocation->CanBecomeLost())
7865 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7866 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7869 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7873 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: publish the new frame index or re-read and retry.
7879 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7881 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): garbled extraction — braces and several lines are missing
// (validation of pCreateInfo, construction of the local `newCreateInfo`
// with defaulted block counts/sizes, early return on CreateMinBlocks
// failure, and the final return) and original line numbers are fused in.
// Code left byte-identical; comments only.
//
// Creates a custom pool: builds a VmaPool_T from a (normalized) copy of
// *pCreateInfo, pre-creates its minimum block count, and registers the pool
// in the sorted m_Pools list under the pools mutex.
7892 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7894 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
// newCreateInfo is a normalized copy of *pCreateInfo (setup lines dropped).
7907 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-allocate the pool's minimum number of blocks; roll back on failure.
7909 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7910 if(res != VK_SUCCESS)
7912 vma_delete(
this, *pPool);
// Register the pool, keeping m_Pools sorted for later binary removal.
7919 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7920 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7926 void VmaAllocator_T::DestroyPool(VmaPool pool)
7930 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7931 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7932 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7935 vma_delete(
this, pool);
7938 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7940 pool->m_BlockVector.GetPoolStats(pPoolStats);
7943 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7945 m_CurrentFrameIndex.store(frameIndex);
7948 void VmaAllocator_T::MakePoolAllocationsLost(
7950 size_t* pLostAllocationCount)
7952 hPool->m_BlockVector.MakePoolAllocationsLost(
7953 m_CurrentFrameIndex.load(),
7954 pLostAllocationCount);
7957 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7959 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7960 (*pAllocation)->InitLost();
7963 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7965 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7968 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7970 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7971 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7973 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7974 if(res == VK_SUCCESS)
7976 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7981 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7986 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7989 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7991 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7997 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7999 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8001 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8004 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8006 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8007 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8009 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8010 m_HeapSizeLimit[heapIndex] += size;
8014 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
8016 if(hAllocation->CanBecomeLost())
8018 return VK_ERROR_MEMORY_MAP_FAILED;
8021 switch(hAllocation->GetType())
8023 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8025 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8026 char *pBytes = VMA_NULL;
8027 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8028 if(res == VK_SUCCESS)
8030 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8031 hAllocation->BlockAllocMap();
8035 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8036 return hAllocation->DedicatedAllocMap(
this, ppData);
8039 return VK_ERROR_MEMORY_MAP_FAILED;
8043 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
8045 switch(hAllocation->GetType())
8047 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8049 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8050 hAllocation->BlockAllocUnmap();
8051 pBlock->Unmap(
this, 1);
8054 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8055 hAllocation->DedicatedAllocUnmap(
this);
8062 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
8064 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8066 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8068 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8069 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8070 VMA_ASSERT(pDedicatedAllocations);
8071 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8072 VMA_ASSERT(success);
8075 VkDeviceMemory hMemory = allocation->GetMemory();
8077 if(allocation->GetMappedData() != VMA_NULL)
8079 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8082 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8084 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8087 #if VMA_STATS_STRING_ENABLED 8089 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8091 bool dedicatedAllocationsStarted =
false;
8092 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8094 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8095 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8096 VMA_ASSERT(pDedicatedAllocVector);
8097 if(pDedicatedAllocVector->empty() ==
false)
8099 if(dedicatedAllocationsStarted ==
false)
8101 dedicatedAllocationsStarted =
true;
8102 json.WriteString(
"DedicatedAllocations");
8106 json.BeginString(
"Type ");
8107 json.ContinueString(memTypeIndex);
8112 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8114 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8115 json.BeginObject(
true);
8117 json.WriteString(
"Type");
8118 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8120 json.WriteString(
"Size");
8121 json.WriteNumber(hAlloc->GetSize());
8123 const void* pUserData = hAlloc->GetUserData();
8124 if(pUserData != VMA_NULL)
8126 json.WriteString(
"UserData");
8127 if(hAlloc->IsUserDataString())
8129 json.WriteString((
const char*)pUserData);
8134 json.ContinueString_Pointer(pUserData);
8145 if(dedicatedAllocationsStarted)
8151 bool allocationsStarted =
false;
8152 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8154 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8156 if(allocationsStarted ==
false)
8158 allocationsStarted =
true;
8159 json.WriteString(
"DefaultPools");
8163 json.BeginString(
"Type ");
8164 json.ContinueString(memTypeIndex);
8167 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8170 if(allocationsStarted)
8177 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8178 const size_t poolCount = m_Pools.size();
8181 json.WriteString(
"Pools");
8183 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8185 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8192 #endif // #if VMA_STATS_STRING_ENABLED 8194 static VkResult AllocateMemoryForImage(
8195 VmaAllocator allocator,
8198 VmaSuballocationType suballocType,
8199 VmaAllocation* pAllocation)
8201 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8203 VkMemoryRequirements vkMemReq = {};
8204 bool requiresDedicatedAllocation =
false;
8205 bool prefersDedicatedAllocation =
false;
8206 allocator->GetImageMemoryRequirements(image, vkMemReq,
8207 requiresDedicatedAllocation, prefersDedicatedAllocation);
8209 return allocator->AllocateMemory(
8211 requiresDedicatedAllocation,
8212 prefersDedicatedAllocation,
8215 *pAllocationCreateInfo,
8225 VmaAllocator* pAllocator)
8227 VMA_ASSERT(pCreateInfo && pAllocator);
8228 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8234 VmaAllocator allocator)
8236 if(allocator != VK_NULL_HANDLE)
8238 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8239 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8240 vma_delete(&allocationCallbacks, allocator);
8245 VmaAllocator allocator,
8246 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8248 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8249 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8253 VmaAllocator allocator,
8254 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8256 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8257 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8261 VmaAllocator allocator,
8262 uint32_t memoryTypeIndex,
8263 VkMemoryPropertyFlags* pFlags)
8265 VMA_ASSERT(allocator && pFlags);
8266 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8267 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8271 VmaAllocator allocator,
8272 uint32_t frameIndex)
8274 VMA_ASSERT(allocator);
8275 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8277 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8279 allocator->SetCurrentFrameIndex(frameIndex);
8283 VmaAllocator allocator,
8286 VMA_ASSERT(allocator && pStats);
8287 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8288 allocator->CalculateStats(pStats);
8291 #if VMA_STATS_STRING_ENABLED 8294 VmaAllocator allocator,
8295 char** ppStatsString,
8296 VkBool32 detailedMap)
8298 VMA_ASSERT(allocator && ppStatsString);
8299 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8301 VmaStringBuilder sb(allocator);
8303 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8307 allocator->CalculateStats(&stats);
8309 json.WriteString(
"Total");
8310 VmaPrintStatInfo(json, stats.
total);
8312 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8314 json.BeginString(
"Heap ");
8315 json.ContinueString(heapIndex);
8319 json.WriteString(
"Size");
8320 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8322 json.WriteString(
"Flags");
8323 json.BeginArray(
true);
8324 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8326 json.WriteString(
"DEVICE_LOCAL");
8332 json.WriteString(
"Stats");
8333 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8336 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8338 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8340 json.BeginString(
"Type ");
8341 json.ContinueString(typeIndex);
8346 json.WriteString(
"Flags");
8347 json.BeginArray(
true);
8348 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8349 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8351 json.WriteString(
"DEVICE_LOCAL");
8353 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8355 json.WriteString(
"HOST_VISIBLE");
8357 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8359 json.WriteString(
"HOST_COHERENT");
8361 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8363 json.WriteString(
"HOST_CACHED");
8365 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8367 json.WriteString(
"LAZILY_ALLOCATED");
8373 json.WriteString(
"Stats");
8374 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8383 if(detailedMap == VK_TRUE)
8385 allocator->PrintDetailedMap(json);
8391 const size_t len = sb.GetLength();
8392 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8395 memcpy(pChars, sb.GetData(), len);
8398 *ppStatsString = pChars;
8402 VmaAllocator allocator,
8405 if(pStatsString != VMA_NULL)
8407 VMA_ASSERT(allocator);
8408 size_t len = strlen(pStatsString);
8409 vma_delete_array(allocator, pStatsString, len + 1);
8413 #endif // #if VMA_STATS_STRING_ENABLED 8419 VmaAllocator allocator,
8420 uint32_t memoryTypeBits,
8422 uint32_t* pMemoryTypeIndex)
8424 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8425 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8426 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8433 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8437 switch(pAllocationCreateInfo->
usage)
8442 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8445 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8448 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8449 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8452 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8453 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8459 *pMemoryTypeIndex = UINT32_MAX;
8460 uint32_t minCost = UINT32_MAX;
8461 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8462 memTypeIndex < allocator->GetMemoryTypeCount();
8463 ++memTypeIndex, memTypeBit <<= 1)
8466 if((memTypeBit & memoryTypeBits) != 0)
8468 const VkMemoryPropertyFlags currFlags =
8469 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8471 if((requiredFlags & ~currFlags) == 0)
8474 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8476 if(currCost < minCost)
8478 *pMemoryTypeIndex = memTypeIndex;
8488 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8492 VmaAllocator allocator,
8493 const VkBufferCreateInfo* pBufferCreateInfo,
8495 uint32_t* pMemoryTypeIndex)
8497 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8498 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8499 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8500 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8502 const VkDevice hDev = allocator->m_hDevice;
8503 VkBuffer hBuffer = VK_NULL_HANDLE;
8504 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8505 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8506 if(res == VK_SUCCESS)
8508 VkMemoryRequirements memReq = {};
8509 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8510 hDev, hBuffer, &memReq);
8514 memReq.memoryTypeBits,
8515 pAllocationCreateInfo,
8518 allocator->GetVulkanFunctions().vkDestroyBuffer(
8519 hDev, hBuffer, allocator->GetAllocationCallbacks());
8525 VmaAllocator allocator,
8526 const VkImageCreateInfo* pImageCreateInfo,
8528 uint32_t* pMemoryTypeIndex)
8530 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8531 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8532 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8533 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8535 const VkDevice hDev = allocator->m_hDevice;
8536 VkImage hImage = VK_NULL_HANDLE;
8537 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8538 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8539 if(res == VK_SUCCESS)
8541 VkMemoryRequirements memReq = {};
8542 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8543 hDev, hImage, &memReq);
8547 memReq.memoryTypeBits,
8548 pAllocationCreateInfo,
8551 allocator->GetVulkanFunctions().vkDestroyImage(
8552 hDev, hImage, allocator->GetAllocationCallbacks());
8558 VmaAllocator allocator,
8562 VMA_ASSERT(allocator && pCreateInfo && pPool);
8564 VMA_DEBUG_LOG(
"vmaCreatePool");
8566 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8568 return allocator->CreatePool(pCreateInfo, pPool);
8572 VmaAllocator allocator,
8575 VMA_ASSERT(allocator);
8577 if(pool == VK_NULL_HANDLE)
8582 VMA_DEBUG_LOG(
"vmaDestroyPool");
8584 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8586 allocator->DestroyPool(pool);
8590 VmaAllocator allocator,
8594 VMA_ASSERT(allocator && pool && pPoolStats);
8596 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8598 allocator->GetPoolStats(pool, pPoolStats);
8602 VmaAllocator allocator,
8604 size_t* pLostAllocationCount)
8606 VMA_ASSERT(allocator && pool);
8608 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8610 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8614 VmaAllocator allocator,
8615 const VkMemoryRequirements* pVkMemoryRequirements,
8617 VmaAllocation* pAllocation,
8620 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8622 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8624 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8626 VkResult result = allocator->AllocateMemory(
8627 *pVkMemoryRequirements,
8633 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8636 if(pAllocationInfo && result == VK_SUCCESS)
8638 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8645 VmaAllocator allocator,
8648 VmaAllocation* pAllocation,
8651 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8653 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8655 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8657 VkMemoryRequirements vkMemReq = {};
8658 bool requiresDedicatedAllocation =
false;
8659 bool prefersDedicatedAllocation =
false;
8660 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8661 requiresDedicatedAllocation,
8662 prefersDedicatedAllocation);
8664 VkResult result = allocator->AllocateMemory(
8666 requiresDedicatedAllocation,
8667 prefersDedicatedAllocation,
8671 VMA_SUBALLOCATION_TYPE_BUFFER,
8674 if(pAllocationInfo && result == VK_SUCCESS)
8676 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8683 VmaAllocator allocator,
8686 VmaAllocation* pAllocation,
8689 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8691 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8693 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8695 VkResult result = AllocateMemoryForImage(
8699 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8702 if(pAllocationInfo && result == VK_SUCCESS)
8704 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8711 VmaAllocator allocator,
8712 VmaAllocation allocation)
8714 VMA_ASSERT(allocator && allocation);
8716 VMA_DEBUG_LOG(
"vmaFreeMemory");
8718 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8720 allocator->FreeMemory(allocation);
8724 VmaAllocator allocator,
8725 VmaAllocation allocation,
8728 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8730 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8732 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8736 VmaAllocator allocator,
8737 VmaAllocation allocation)
8739 VMA_ASSERT(allocator && allocation);
8741 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8743 return allocator->TouchAllocation(allocation);
8747 VmaAllocator allocator,
8748 VmaAllocation allocation,
8751 VMA_ASSERT(allocator && allocation);
8753 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8755 allocation->SetUserData(allocator, pUserData);
8759 VmaAllocator allocator,
8760 VmaAllocation* pAllocation)
8762 VMA_ASSERT(allocator && pAllocation);
8764 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8766 allocator->CreateLostAllocation(pAllocation);
8770 VmaAllocator allocator,
8771 VmaAllocation allocation,
8774 VMA_ASSERT(allocator && allocation && ppData);
8776 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8778 return allocator->Map(allocation, ppData);
8782 VmaAllocator allocator,
8783 VmaAllocation allocation)
8785 VMA_ASSERT(allocator && allocation);
8787 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8789 allocator->Unmap(allocation);
8793 VmaAllocator allocator,
8794 VmaAllocation* pAllocations,
8795 size_t allocationCount,
8796 VkBool32* pAllocationsChanged,
8800 VMA_ASSERT(allocator && pAllocations);
8802 VMA_DEBUG_LOG(
"vmaDefragment");
8804 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8806 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8810 VmaAllocator allocator,
8811 const VkBufferCreateInfo* pBufferCreateInfo,
8814 VmaAllocation* pAllocation,
8817 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8819 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8821 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8823 *pBuffer = VK_NULL_HANDLE;
8824 *pAllocation = VK_NULL_HANDLE;
8827 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8828 allocator->m_hDevice,
8830 allocator->GetAllocationCallbacks(),
8835 VkMemoryRequirements vkMemReq = {};
8836 bool requiresDedicatedAllocation =
false;
8837 bool prefersDedicatedAllocation =
false;
8838 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8839 requiresDedicatedAllocation, prefersDedicatedAllocation);
8843 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8845 VMA_ASSERT(vkMemReq.alignment %
8846 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8848 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8850 VMA_ASSERT(vkMemReq.alignment %
8851 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8853 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8855 VMA_ASSERT(vkMemReq.alignment %
8856 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8860 res = allocator->AllocateMemory(
8862 requiresDedicatedAllocation,
8863 prefersDedicatedAllocation,
8866 *pAllocationCreateInfo,
8867 VMA_SUBALLOCATION_TYPE_BUFFER,
8872 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8873 allocator->m_hDevice,
8875 (*pAllocation)->GetMemory(),
8876 (*pAllocation)->GetOffset());
8880 if(pAllocationInfo != VMA_NULL)
8882 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8886 allocator->FreeMemory(*pAllocation);
8887 *pAllocation = VK_NULL_HANDLE;
8888 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8889 *pBuffer = VK_NULL_HANDLE;
8892 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8893 *pBuffer = VK_NULL_HANDLE;
8900 VmaAllocator allocator,
8902 VmaAllocation allocation)
8904 if(buffer != VK_NULL_HANDLE)
8906 VMA_ASSERT(allocator);
8908 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8910 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8912 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8914 allocator->FreeMemory(allocation);
8919 VmaAllocator allocator,
8920 const VkImageCreateInfo* pImageCreateInfo,
8923 VmaAllocation* pAllocation,
8926 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8928 VMA_DEBUG_LOG(
"vmaCreateImage");
8930 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8932 *pImage = VK_NULL_HANDLE;
8933 *pAllocation = VK_NULL_HANDLE;
8936 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8937 allocator->m_hDevice,
8939 allocator->GetAllocationCallbacks(),
8943 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8944 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8945 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8948 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8952 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8953 allocator->m_hDevice,
8955 (*pAllocation)->GetMemory(),
8956 (*pAllocation)->GetOffset());
8960 if(pAllocationInfo != VMA_NULL)
8962 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8966 allocator->FreeMemory(*pAllocation);
8967 *pAllocation = VK_NULL_HANDLE;
8968 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8969 *pImage = VK_NULL_HANDLE;
8972 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8973 *pImage = VK_NULL_HANDLE;
8980 VmaAllocator allocator,
8982 VmaAllocation allocation)
8984 if(image != VK_NULL_HANDLE)
8986 VMA_ASSERT(allocator);
8988 VMA_DEBUG_LOG(
"vmaDestroyImage");
8990 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8992 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8994 allocator->FreeMemory(allocation);
8998 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:939
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1193
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:964
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:949
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1150
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:943
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1499
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:961
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1698
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1369
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1423
Definition: vk_mem_alloc.h:1230
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:932
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1268
Definition: vk_mem_alloc.h:1177
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:973
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1026
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:958
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1181
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1091
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:946
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1090
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:954
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1702
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:990
VmaStatInfo total
Definition: vk_mem_alloc.h:1100
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1710
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1252
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1693
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:947
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:874
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:967
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1377
Definition: vk_mem_alloc.h:1371
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1509
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:944
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1289
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1393
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1429
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:930
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1380
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1128
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1688
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1706
Definition: vk_mem_alloc.h:1167
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1276
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:945
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1096
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:880
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:901
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:906
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1708
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1263
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1439
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:940
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1079
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1388
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:893
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1237
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1092
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:897
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1383
Definition: vk_mem_alloc.h:1176
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1258
Definition: vk_mem_alloc.h:1249
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1082
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:942
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1401
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:976
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1432
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1247
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1282
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1014
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1098
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1217
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1091
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:951
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:895
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:950
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1415
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1523
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:970
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1091
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1088
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1420
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1504
Definition: vk_mem_alloc.h:1245
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1704
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:938
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:953
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1086
Definition: vk_mem_alloc.h:1133
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1373
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1084
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:948
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:952
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1204
Definition: vk_mem_alloc.h:1160
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1518
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:928
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:941
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1485
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1351
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1092
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1243
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1099
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1426
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1092
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1490