23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 943 #include <vulkan/vulkan.h> 945 VK_DEFINE_HANDLE(VmaAllocator)
949 VmaAllocator allocator,
951 VkDeviceMemory memory,
955 VmaAllocator allocator,
957 VkDeviceMemory memory,
1106 VmaAllocator* pAllocator);
1110 VmaAllocator allocator);
1117 VmaAllocator allocator,
1118 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1125 VmaAllocator allocator,
1126 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1135 VmaAllocator allocator,
1136 uint32_t memoryTypeIndex,
1137 VkMemoryPropertyFlags* pFlags);
1148 VmaAllocator allocator,
1149 uint32_t frameIndex);
1179 VmaAllocator allocator,
1182 #define VMA_STATS_STRING_ENABLED 1 1184 #if VMA_STATS_STRING_ENABLED 1190 VmaAllocator allocator,
1191 char** ppStatsString,
1192 VkBool32 detailedMap);
1195 VmaAllocator allocator,
1196 char* pStatsString);
1198 #endif // #if VMA_STATS_STRING_ENABLED 1200 VK_DEFINE_HANDLE(VmaPool)
1383 VmaAllocator allocator,
1384 uint32_t memoryTypeBits,
1386 uint32_t* pMemoryTypeIndex);
1401 VmaAllocator allocator,
1402 const VkBufferCreateInfo* pBufferCreateInfo,
1404 uint32_t* pMemoryTypeIndex);
1419 VmaAllocator allocator,
1420 const VkImageCreateInfo* pImageCreateInfo,
1422 uint32_t* pMemoryTypeIndex);
1523 VmaAllocator allocator,
1530 VmaAllocator allocator,
1540 VmaAllocator allocator,
1551 VmaAllocator allocator,
1553 size_t* pLostAllocationCount);
1555 VK_DEFINE_HANDLE(VmaAllocation)
1611 VmaAllocator allocator,
1612 const VkMemoryRequirements* pVkMemoryRequirements,
1614 VmaAllocation* pAllocation,
1624 VmaAllocator allocator,
1627 VmaAllocation* pAllocation,
1632 VmaAllocator allocator,
1635 VmaAllocation* pAllocation,
1640 VmaAllocator allocator,
1641 VmaAllocation allocation);
1660 VmaAllocator allocator,
1661 VmaAllocation allocation,
1679 VmaAllocator allocator,
1680 VmaAllocation allocation);
1696 VmaAllocator allocator,
1697 VmaAllocation allocation,
1711 VmaAllocator allocator,
1712 VmaAllocation* pAllocation);
1749 VmaAllocator allocator,
1750 VmaAllocation allocation,
1758 VmaAllocator allocator,
1759 VmaAllocation allocation);
1870 VmaAllocator allocator,
1871 VmaAllocation* pAllocations,
1872 size_t allocationCount,
1873 VkBool32* pAllocationsChanged,
1904 VmaAllocator allocator,
1905 const VkBufferCreateInfo* pBufferCreateInfo,
1908 VmaAllocation* pAllocation,
1923 VmaAllocator allocator,
1925 VmaAllocation allocation);
1929 VmaAllocator allocator,
1930 const VkImageCreateInfo* pImageCreateInfo,
1933 VmaAllocation* pAllocation,
1948 VmaAllocator allocator,
1950 VmaAllocation allocation);
1956 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1959 #ifdef __INTELLISENSE__ 1960 #define VMA_IMPLEMENTATION 1963 #ifdef VMA_IMPLEMENTATION 1964 #undef VMA_IMPLEMENTATION 1986 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1987 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1999 #if VMA_USE_STL_CONTAINERS 2000 #define VMA_USE_STL_VECTOR 1 2001 #define VMA_USE_STL_UNORDERED_MAP 1 2002 #define VMA_USE_STL_LIST 1 2005 #if VMA_USE_STL_VECTOR 2009 #if VMA_USE_STL_UNORDERED_MAP 2010 #include <unordered_map> 2013 #if VMA_USE_STL_LIST 2022 #include <algorithm> 2026 #if !defined(_WIN32) && !defined(__APPLE__) 2032 #define VMA_NULL nullptr 2035 #if defined(__APPLE__) || defined(__ANDROID__) 2037 void *aligned_alloc(
size_t alignment,
size_t size)
2040 if(alignment <
sizeof(
void*))
2042 alignment =
sizeof(
void*);
2046 if(posix_memalign(&pointer, alignment, size) == 0)
2055 #define VMA_ASSERT(expr) assert(expr) 2057 #define VMA_ASSERT(expr) 2063 #ifndef VMA_HEAVY_ASSERT 2065 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2067 #define VMA_HEAVY_ASSERT(expr) 2071 #ifndef VMA_ALIGN_OF 2072 #define VMA_ALIGN_OF(type) (__alignof(type)) 2075 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2077 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2079 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2083 #ifndef VMA_SYSTEM_FREE 2085 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2087 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2092 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2096 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2100 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2104 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2107 #ifndef VMA_DEBUG_LOG 2108 #define VMA_DEBUG_LOG(format, ...) 2118 #if VMA_STATS_STRING_ENABLED 2119 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2121 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned value as decimal text into outStr
// (buffer capacity strLen; NUL-terminated whenever strLen > 0).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long printable = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", printable);
}
// Formats a pointer value into outStr using the platform's "%p" notation.
static inline void VmaPtrToStr(
    char* outStr,
    size_t strLen,
    const void* ptr)
{
    (void)snprintf(outStr, strLen, "%p", ptr);
}
2139 void Lock() { m_Mutex.lock(); }
2140 void Unlock() { m_Mutex.unlock(); }
2144 #define VMA_MUTEX VmaMutex 2155 #ifndef VMA_ATOMIC_UINT32 2156 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2159 #ifndef VMA_BEST_FIT 2172 #define VMA_BEST_FIT (1) 2175 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2180 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2183 #ifndef VMA_DEBUG_ALIGNMENT 2188 #define VMA_DEBUG_ALIGNMENT (1) 2191 #ifndef VMA_DEBUG_MARGIN 2196 #define VMA_DEBUG_MARGIN (0) 2199 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2204 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2207 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2212 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2215 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2216 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2220 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2221 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2225 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// A VkAllocationCallbacks value with every member null — used as a
// stand-in when the client supplies no custom CPU allocation callbacks.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count),
// using the classic parallel bit-summing technique.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c; // restored: the snapshot dropped the return of the count
}
// Rounds `val` up to the nearest multiple of `align`.
// Works for any positive `align`, not just powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T multiples = (val + align - 1) / align;
    return multiples * align;
}
// Integer division of x by y with rounding to nearest
// (halves round up for non-negative operands).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Lomuto-style partition step for VmaQuickSort: the last element is the
// pivot; every element for which cmp(elem, pivot) holds is moved in front
// of it. Returns an iterator to the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex; // restored: snapshot dropped the insert-position advance
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex; // restored: snapshot dropped the return of the pivot position
}
2285 template<
typename Iterator,
typename Compare>
2286 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2290 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2291 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2292 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2296 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2298 #endif // #ifndef VMA_SORT 2307 static inline bool VmaBlocksOnSamePage(
2308 VkDeviceSize resourceAOffset,
2309 VkDeviceSize resourceASize,
2310 VkDeviceSize resourceBOffset,
2311 VkDeviceSize pageSize)
2313 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2314 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2315 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2316 VkDeviceSize resourceBStart = resourceBOffset;
2317 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2318 return resourceAEndPage == resourceBStartPage;
// Classification of what a suballocation holds. Used to detect
// bufferImageGranularity conflicts between neighboring suballocations.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // allocated, resource kind unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2338 static inline bool VmaIsBufferImageGranularityConflict(
2339 VmaSuballocationType suballocType1,
2340 VmaSuballocationType suballocType2)
2342 if(suballocType1 > suballocType2)
2344 VMA_SWAP(suballocType1, suballocType2);
2347 switch(suballocType1)
2349 case VMA_SUBALLOCATION_TYPE_FREE:
2351 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2353 case VMA_SUBALLOCATION_TYPE_BUFFER:
2355 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2356 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2357 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2359 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2360 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2361 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2362 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2364 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2365 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2377 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2378 m_pMutex(useMutex ? &mutex : VMA_NULL)
2395 VMA_MUTEX* m_pMutex;
2398 #if VMA_DEBUG_GLOBAL_MUTEX 2399 static VMA_MUTEX gDebugGlobalMutex;
2400 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2402 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2406 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to
// the first element for which cmp(elem, key) is false (first element not
// less than key), or `end` if there is none. Equivalent to
// std::lower_bound with a custom comparator.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up) // restored: snapshot dropped the loop/else/return lines
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2439 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2441 if((pAllocationCallbacks != VMA_NULL) &&
2442 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2444 return (*pAllocationCallbacks->pfnAllocation)(
2445 pAllocationCallbacks->pUserData,
2448 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2452 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2456 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2458 if((pAllocationCallbacks != VMA_NULL) &&
2459 (pAllocationCallbacks->pfnFree != VMA_NULL))
2461 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2465 VMA_SYSTEM_FREE(ptr);
2469 template<
typename T>
2470 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2472 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2475 template<
typename T>
2476 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2478 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2481 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2483 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2485 template<
typename T>
2486 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2489 VmaFree(pAllocationCallbacks, ptr);
2492 template<
typename T>
2493 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2497 for(
size_t i = count; i--; )
2501 VmaFree(pAllocationCallbacks, ptr);
2506 template<
typename T>
2507 class VmaStlAllocator
2510 const VkAllocationCallbacks*
const m_pCallbacks;
2511 typedef T value_type;
2513 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2514 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2516 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2517 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2519 template<
typename U>
2520 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2522 return m_pCallbacks == rhs.m_pCallbacks;
2524 template<
typename U>
2525 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2527 return m_pCallbacks != rhs.m_pCallbacks;
2530 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2533 #if VMA_USE_STL_VECTOR 2535 #define VmaVector std::vector 2537 template<
typename T, typename allocatorT>
// Inserts `item` at position `index` of a std::vector (STL-backed build).
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.insert(pos, item);
}
// Erases the element at position `index` from a std::vector
// (STL-backed build).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    const typename std::vector<T, allocatorT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
2549 #else // #if VMA_USE_STL_VECTOR 2554 template<
typename T,
typename AllocatorT>
2558 typedef T value_type;
2560 VmaVector(
const AllocatorT& allocator) :
2561 m_Allocator(allocator),
2568 VmaVector(
size_t count,
const AllocatorT& allocator) :
2569 m_Allocator(allocator),
2570 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2576 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2577 m_Allocator(src.m_Allocator),
2578 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2579 m_Count(src.m_Count),
2580 m_Capacity(src.m_Count)
2584 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2590 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2593 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2597 resize(rhs.m_Count);
2600 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2606 bool empty()
const {
return m_Count == 0; }
2607 size_t size()
const {
return m_Count; }
2608 T* data() {
return m_pArray; }
2609 const T* data()
const {
return m_pArray; }
2611 T& operator[](
size_t index)
2613 VMA_HEAVY_ASSERT(index < m_Count);
2614 return m_pArray[index];
2616 const T& operator[](
size_t index)
const 2618 VMA_HEAVY_ASSERT(index < m_Count);
2619 return m_pArray[index];
2624 VMA_HEAVY_ASSERT(m_Count > 0);
2627 const T& front()
const 2629 VMA_HEAVY_ASSERT(m_Count > 0);
2634 VMA_HEAVY_ASSERT(m_Count > 0);
2635 return m_pArray[m_Count - 1];
2637 const T& back()
const 2639 VMA_HEAVY_ASSERT(m_Count > 0);
2640 return m_pArray[m_Count - 1];
2643 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2645 newCapacity = VMA_MAX(newCapacity, m_Count);
2647 if((newCapacity < m_Capacity) && !freeMemory)
2649 newCapacity = m_Capacity;
2652 if(newCapacity != m_Capacity)
2654 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2657 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2659 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2660 m_Capacity = newCapacity;
2661 m_pArray = newArray;
2665 void resize(
size_t newCount,
bool freeMemory =
false)
2667 size_t newCapacity = m_Capacity;
2668 if(newCount > m_Capacity)
2670 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2674 newCapacity = newCount;
2677 if(newCapacity != m_Capacity)
2679 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2680 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2681 if(elementsToCopy != 0)
2683 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2685 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2686 m_Capacity = newCapacity;
2687 m_pArray = newArray;
2693 void clear(
bool freeMemory =
false)
2695 resize(0, freeMemory);
2698 void insert(
size_t index,
const T& src)
2700 VMA_HEAVY_ASSERT(index <= m_Count);
2701 const size_t oldCount = size();
2702 resize(oldCount + 1);
2703 if(index < oldCount)
2705 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2707 m_pArray[index] = src;
2710 void remove(
size_t index)
2712 VMA_HEAVY_ASSERT(index < m_Count);
2713 const size_t oldCount = size();
2714 if(index < oldCount - 1)
2716 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2718 resize(oldCount - 1);
2721 void push_back(
const T& src)
2723 const size_t newIndex = size();
2724 resize(newIndex + 1);
2725 m_pArray[newIndex] = src;
2730 VMA_HEAVY_ASSERT(m_Count > 0);
2734 void push_front(
const T& src)
2741 VMA_HEAVY_ASSERT(m_Count > 0);
2745 typedef T* iterator;
2747 iterator begin() {
return m_pArray; }
2748 iterator end() {
return m_pArray + m_Count; }
2751 AllocatorT m_Allocator;
2757 template<
typename T,
typename allocatorT>
2758 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2760 vec.insert(index, item);
2763 template<
typename T,
typename allocatorT>
2764 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2769 #endif // #if VMA_USE_STL_VECTOR 2771 template<
typename CmpLess,
typename VectorT>
2772 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2774 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2776 vector.data() + vector.size(),
2778 CmpLess()) - vector.data();
2779 VmaVectorInsert(vector, indexToInsert, value);
2780 return indexToInsert;
2783 template<
typename CmpLess,
typename VectorT>
2784 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2787 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2792 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2794 size_t indexToRemove = it - vector.begin();
2795 VmaVectorRemove(vector, indexToRemove);
2801 template<
typename CmpLess,
typename VectorT>
2802 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2805 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2807 vector.data() + vector.size(),
2810 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2812 return it - vector.begin();
2816 return vector.size();
2828 template<
typename T>
2829 class VmaPoolAllocator
2832 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2833 ~VmaPoolAllocator();
2841 uint32_t NextFreeIndex;
2848 uint32_t FirstFreeIndex;
2851 const VkAllocationCallbacks* m_pAllocationCallbacks;
2852 size_t m_ItemsPerBlock;
2853 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2855 ItemBlock& CreateNewBlock();
2858 template<
typename T>
2859 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2860 m_pAllocationCallbacks(pAllocationCallbacks),
2861 m_ItemsPerBlock(itemsPerBlock),
2862 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2864 VMA_ASSERT(itemsPerBlock > 0);
2867 template<
typename T>
2868 VmaPoolAllocator<T>::~VmaPoolAllocator()
2873 template<
typename T>
2874 void VmaPoolAllocator<T>::Clear()
2876 for(
size_t i = m_ItemBlocks.size(); i--; )
2877 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2878 m_ItemBlocks.clear();
2881 template<
typename T>
2882 T* VmaPoolAllocator<T>::Alloc()
2884 for(
size_t i = m_ItemBlocks.size(); i--; )
2886 ItemBlock& block = m_ItemBlocks[i];
2888 if(block.FirstFreeIndex != UINT32_MAX)
2890 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2891 block.FirstFreeIndex = pItem->NextFreeIndex;
2892 return &pItem->Value;
2897 ItemBlock& newBlock = CreateNewBlock();
2898 Item*
const pItem = &newBlock.pItems[0];
2899 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2900 return &pItem->Value;
2903 template<
typename T>
2904 void VmaPoolAllocator<T>::Free(T* ptr)
2907 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2909 ItemBlock& block = m_ItemBlocks[i];
2913 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2916 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2918 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2919 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2920 block.FirstFreeIndex = index;
2924 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2927 template<
typename T>
2928 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2930 ItemBlock newBlock = {
2931 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2933 m_ItemBlocks.push_back(newBlock);
2936 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2937 newBlock.pItems[i].NextFreeIndex = i + 1;
2938 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2939 return m_ItemBlocks.back();
2945 #if VMA_USE_STL_LIST 2947 #define VmaList std::list 2949 #else // #if VMA_USE_STL_LIST 2951 template<
typename T>
2960 template<
typename T>
2964 typedef VmaListItem<T> ItemType;
2966 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2970 size_t GetCount()
const {
return m_Count; }
2971 bool IsEmpty()
const {
return m_Count == 0; }
2973 ItemType* Front() {
return m_pFront; }
2974 const ItemType* Front()
const {
return m_pFront; }
2975 ItemType* Back() {
return m_pBack; }
2976 const ItemType* Back()
const {
return m_pBack; }
2978 ItemType* PushBack();
2979 ItemType* PushFront();
2980 ItemType* PushBack(
const T& value);
2981 ItemType* PushFront(
const T& value);
2986 ItemType* InsertBefore(ItemType* pItem);
2988 ItemType* InsertAfter(ItemType* pItem);
2990 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2991 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2993 void Remove(ItemType* pItem);
2996 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2997 VmaPoolAllocator<ItemType> m_ItemAllocator;
3003 VmaRawList(
const VmaRawList<T>& src);
3004 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
3007 template<
typename T>
3008 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3009 m_pAllocationCallbacks(pAllocationCallbacks),
3010 m_ItemAllocator(pAllocationCallbacks, 128),
3017 template<
typename T>
3018 VmaRawList<T>::~VmaRawList()
3024 template<
typename T>
3025 void VmaRawList<T>::Clear()
3027 if(IsEmpty() ==
false)
3029 ItemType* pItem = m_pBack;
3030 while(pItem != VMA_NULL)
3032 ItemType*
const pPrevItem = pItem->pPrev;
3033 m_ItemAllocator.Free(pItem);
3036 m_pFront = VMA_NULL;
3042 template<
typename T>
3043 VmaListItem<T>* VmaRawList<T>::PushBack()
3045 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3046 pNewItem->pNext = VMA_NULL;
3049 pNewItem->pPrev = VMA_NULL;
3050 m_pFront = pNewItem;
3056 pNewItem->pPrev = m_pBack;
3057 m_pBack->pNext = pNewItem;
3064 template<
typename T>
3065 VmaListItem<T>* VmaRawList<T>::PushFront()
3067 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3068 pNewItem->pPrev = VMA_NULL;
3071 pNewItem->pNext = VMA_NULL;
3072 m_pFront = pNewItem;
3078 pNewItem->pNext = m_pFront;
3079 m_pFront->pPrev = pNewItem;
3080 m_pFront = pNewItem;
3086 template<
typename T>
3087 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3089 ItemType*
const pNewItem = PushBack();
3090 pNewItem->Value = value;
3094 template<
typename T>
3095 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3097 ItemType*
const pNewItem = PushFront();
3098 pNewItem->Value = value;
3102 template<
typename T>
3103 void VmaRawList<T>::PopBack()
3105 VMA_HEAVY_ASSERT(m_Count > 0);
3106 ItemType*
const pBackItem = m_pBack;
3107 ItemType*
const pPrevItem = pBackItem->pPrev;
3108 if(pPrevItem != VMA_NULL)
3110 pPrevItem->pNext = VMA_NULL;
3112 m_pBack = pPrevItem;
3113 m_ItemAllocator.Free(pBackItem);
3117 template<
typename T>
3118 void VmaRawList<T>::PopFront()
3120 VMA_HEAVY_ASSERT(m_Count > 0);
3121 ItemType*
const pFrontItem = m_pFront;
3122 ItemType*
const pNextItem = pFrontItem->pNext;
3123 if(pNextItem != VMA_NULL)
3125 pNextItem->pPrev = VMA_NULL;
3127 m_pFront = pNextItem;
3128 m_ItemAllocator.Free(pFrontItem);
3132 template<
typename T>
3133 void VmaRawList<T>::Remove(ItemType* pItem)
3135 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3136 VMA_HEAVY_ASSERT(m_Count > 0);
3138 if(pItem->pPrev != VMA_NULL)
3140 pItem->pPrev->pNext = pItem->pNext;
3144 VMA_HEAVY_ASSERT(m_pFront == pItem);
3145 m_pFront = pItem->pNext;
3148 if(pItem->pNext != VMA_NULL)
3150 pItem->pNext->pPrev = pItem->pPrev;
3154 VMA_HEAVY_ASSERT(m_pBack == pItem);
3155 m_pBack = pItem->pPrev;
3158 m_ItemAllocator.Free(pItem);
3162 template<
typename T>
3163 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3165 if(pItem != VMA_NULL)
3167 ItemType*
const prevItem = pItem->pPrev;
3168 ItemType*
const newItem = m_ItemAllocator.Alloc();
3169 newItem->pPrev = prevItem;
3170 newItem->pNext = pItem;
3171 pItem->pPrev = newItem;
3172 if(prevItem != VMA_NULL)
3174 prevItem->pNext = newItem;
3178 VMA_HEAVY_ASSERT(m_pFront == pItem);
3188 template<
typename T>
3189 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3191 if(pItem != VMA_NULL)
3193 ItemType*
const nextItem = pItem->pNext;
3194 ItemType*
const newItem = m_ItemAllocator.Alloc();
3195 newItem->pNext = nextItem;
3196 newItem->pPrev = pItem;
3197 pItem->pNext = newItem;
3198 if(nextItem != VMA_NULL)
3200 nextItem->pPrev = newItem;
3204 VMA_HEAVY_ASSERT(m_pBack == pItem);
3214 template<
typename T>
3215 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3217 ItemType*
const newItem = InsertBefore(pItem);
3218 newItem->Value = value;
3222 template<
typename T>
3223 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3225 ItemType*
const newItem = InsertAfter(pItem);
3226 newItem->Value = value;
3230 template<
typename T,
typename AllocatorT>
3243 T& operator*()
const 3245 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3246 return m_pItem->Value;
3248 T* operator->()
const 3250 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3251 return &m_pItem->Value;
3254 iterator& operator++()
3256 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3257 m_pItem = m_pItem->pNext;
3260 iterator& operator--()
3262 if(m_pItem != VMA_NULL)
3264 m_pItem = m_pItem->pPrev;
3268 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3269 m_pItem = m_pList->Back();
3274 iterator operator++(
int)
3276 iterator result = *
this;
3280 iterator operator--(
int)
3282 iterator result = *
this;
3287 bool operator==(
const iterator& rhs)
const 3289 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3290 return m_pItem == rhs.m_pItem;
3292 bool operator!=(
const iterator& rhs)
const 3294 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3295 return m_pItem != rhs.m_pItem;
3299 VmaRawList<T>* m_pList;
3300 VmaListItem<T>* m_pItem;
3302 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3308 friend class VmaList<T, AllocatorT>;
3311 class const_iterator
3320 const_iterator(
const iterator& src) :
3321 m_pList(src.m_pList),
3322 m_pItem(src.m_pItem)
3326 const T& operator*()
const 3328 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3329 return m_pItem->Value;
3331 const T* operator->()
const 3333 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3334 return &m_pItem->Value;
3337 const_iterator& operator++()
3339 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3340 m_pItem = m_pItem->pNext;
3343 const_iterator& operator--()
3345 if(m_pItem != VMA_NULL)
3347 m_pItem = m_pItem->pPrev;
3351 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3352 m_pItem = m_pList->Back();
3357 const_iterator operator++(
int)
3359 const_iterator result = *
this;
3363 const_iterator operator--(
int)
3365 const_iterator result = *
this;
3370 bool operator==(
const const_iterator& rhs)
const 3372 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3373 return m_pItem == rhs.m_pItem;
3375 bool operator!=(
const const_iterator& rhs)
const 3377 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3378 return m_pItem != rhs.m_pItem;
3382 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3388 const VmaRawList<T>* m_pList;
3389 const VmaListItem<T>* m_pItem;
3391 friend class VmaList<T, AllocatorT>;
3394 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3396 bool empty()
const {
return m_RawList.IsEmpty(); }
3397 size_t size()
const {
return m_RawList.GetCount(); }
3399 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3400 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3402 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3403 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3405 void clear() { m_RawList.Clear(); }
3406 void push_back(
const T& value) { m_RawList.PushBack(value); }
3407 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3408 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3411 VmaRawList<T> m_RawList;
3414 #endif // #if VMA_USE_STL_LIST 3422 #if VMA_USE_STL_UNORDERED_MAP 3424 #define VmaPair std::pair 3426 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3427 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3429 #else // #if VMA_USE_STL_UNORDERED_MAP 3431 template<
typename T1,
typename T2>
3437 VmaPair() : first(), second() { }
3438 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3444 template<
typename KeyT,
typename ValueT>
3448 typedef VmaPair<KeyT, ValueT> PairType;
3449 typedef PairType* iterator;
3451 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3453 iterator begin() {
return m_Vector.begin(); }
3454 iterator end() {
return m_Vector.end(); }
3456 void insert(
const PairType& pair);
3457 iterator find(
const KeyT& key);
3458 void erase(iterator it);
3461 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3464 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3466 template<
typename FirstT,
typename SecondT>
3467 struct VmaPairFirstLess
3469 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3471 return lhs.first < rhs.first;
3473 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3475 return lhs.first < rhsFirst;
3479 template<
typename KeyT,
typename ValueT>
3480 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3482 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3484 m_Vector.data() + m_Vector.size(),
3486 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3487 VmaVectorInsert(m_Vector, indexToInsert, pair);
3490 template<
typename KeyT,
typename ValueT>
3491 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3493 PairType* it = VmaBinaryFindFirstNotLess(
3495 m_Vector.data() + m_Vector.size(),
3497 VmaPairFirstLess<KeyT, ValueT>());
3498 if((it != m_Vector.end()) && (it->first == key))
3504 return m_Vector.end();
3508 template<
typename KeyT,
typename ValueT>
3509 void VmaMap<KeyT, ValueT>::erase(iterator it)
3511 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3514 #endif // #if VMA_USE_STL_UNORDERED_MAP 3520 class VmaDeviceMemoryBlock;
3522 struct VmaAllocation_T
3525 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3529 FLAG_USER_DATA_STRING = 0x01,
3533 enum ALLOCATION_TYPE
3535 ALLOCATION_TYPE_NONE,
3536 ALLOCATION_TYPE_BLOCK,
3537 ALLOCATION_TYPE_DEDICATED,
3540 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3543 m_pUserData(VMA_NULL),
3544 m_LastUseFrameIndex(currentFrameIndex),
3545 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3546 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3548 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3554 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3557 VMA_ASSERT(m_pUserData == VMA_NULL);
3560 void InitBlockAllocation(
3562 VmaDeviceMemoryBlock* block,
3563 VkDeviceSize offset,
3564 VkDeviceSize alignment,
3566 VmaSuballocationType suballocationType,
3570 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3571 VMA_ASSERT(block != VMA_NULL);
3572 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3573 m_Alignment = alignment;
3575 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3576 m_SuballocationType = (uint8_t)suballocationType;
3577 m_BlockAllocation.m_hPool = hPool;
3578 m_BlockAllocation.m_Block = block;
3579 m_BlockAllocation.m_Offset = offset;
3580 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3585 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3586 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3587 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3588 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3589 m_BlockAllocation.m_Block = VMA_NULL;
3590 m_BlockAllocation.m_Offset = 0;
3591 m_BlockAllocation.m_CanBecomeLost =
true;
3594 void ChangeBlockAllocation(
3595 VmaAllocator hAllocator,
3596 VmaDeviceMemoryBlock* block,
3597 VkDeviceSize offset);
3600 void InitDedicatedAllocation(
3601 uint32_t memoryTypeIndex,
3602 VkDeviceMemory hMemory,
3603 VmaSuballocationType suballocationType,
3607 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3608 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3609 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3612 m_SuballocationType = (uint8_t)suballocationType;
3613 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3614 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3615 m_DedicatedAllocation.m_hMemory = hMemory;
3616 m_DedicatedAllocation.m_pMappedData = pMappedData;
3619 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3620 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3621 VkDeviceSize GetSize()
const {
return m_Size; }
3622 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3623 void* GetUserData()
const {
return m_pUserData; }
3624 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3625 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3627 VmaDeviceMemoryBlock* GetBlock()
const 3629 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3630 return m_BlockAllocation.m_Block;
3632 VkDeviceSize GetOffset()
const;
3633 VkDeviceMemory GetMemory()
const;
3634 uint32_t GetMemoryTypeIndex()
const;
3635 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3636 void* GetMappedData()
const;
3637 bool CanBecomeLost()
const;
3638 VmaPool GetPool()
const;
3640 uint32_t GetLastUseFrameIndex()
const 3642 return m_LastUseFrameIndex.load();
3644 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3646 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3656 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3658 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3660 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3671 void BlockAllocMap();
3672 void BlockAllocUnmap();
3673 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3674 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3677 VkDeviceSize m_Alignment;
3678 VkDeviceSize m_Size;
3680 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3682 uint8_t m_SuballocationType;
3689 struct BlockAllocation
3692 VmaDeviceMemoryBlock* m_Block;
3693 VkDeviceSize m_Offset;
3694 bool m_CanBecomeLost;
3698 struct DedicatedAllocation
3700 uint32_t m_MemoryTypeIndex;
3701 VkDeviceMemory m_hMemory;
3702 void* m_pMappedData;
3708 BlockAllocation m_BlockAllocation;
3710 DedicatedAllocation m_DedicatedAllocation;
3713 void FreeUserDataString(VmaAllocator hAllocator);
3720 struct VmaSuballocation
3722 VkDeviceSize offset;
3724 VmaAllocation hAllocation;
3725 VmaSuballocationType type;
3728 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3731 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3746 struct VmaAllocationRequest
3748 VkDeviceSize offset;
3749 VkDeviceSize sumFreeSize;
3750 VkDeviceSize sumItemSize;
3751 VmaSuballocationList::iterator item;
3752 size_t itemsToMakeLostCount;
3754 VkDeviceSize CalcCost()
const 3756 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3764 class VmaBlockMetadata
3767 VmaBlockMetadata(VmaAllocator hAllocator);
3768 ~VmaBlockMetadata();
3769 void Init(VkDeviceSize size);
3772 bool Validate()
const;
3773 VkDeviceSize GetSize()
const {
return m_Size; }
3774 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3775 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3776 VkDeviceSize GetUnusedRangeSizeMax()
const;
3778 bool IsEmpty()
const;
3780 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3783 #if VMA_STATS_STRING_ENABLED 3784 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3788 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3793 bool CreateAllocationRequest(
3794 uint32_t currentFrameIndex,
3795 uint32_t frameInUseCount,
3796 VkDeviceSize bufferImageGranularity,
3797 VkDeviceSize allocSize,
3798 VkDeviceSize allocAlignment,
3799 VmaSuballocationType allocType,
3800 bool canMakeOtherLost,
3801 VmaAllocationRequest* pAllocationRequest);
3803 bool MakeRequestedAllocationsLost(
3804 uint32_t currentFrameIndex,
3805 uint32_t frameInUseCount,
3806 VmaAllocationRequest* pAllocationRequest);
3808 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3812 const VmaAllocationRequest& request,
3813 VmaSuballocationType type,
3814 VkDeviceSize allocSize,
3815 VmaAllocation hAllocation);
3818 void Free(
const VmaAllocation allocation);
3819 void FreeAtOffset(VkDeviceSize offset);
3822 VkDeviceSize m_Size;
3823 uint32_t m_FreeCount;
3824 VkDeviceSize m_SumFreeSize;
3825 VmaSuballocationList m_Suballocations;
3828 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3830 bool ValidateFreeSuballocationList()
const;
3834 bool CheckAllocation(
3835 uint32_t currentFrameIndex,
3836 uint32_t frameInUseCount,
3837 VkDeviceSize bufferImageGranularity,
3838 VkDeviceSize allocSize,
3839 VkDeviceSize allocAlignment,
3840 VmaSuballocationType allocType,
3841 VmaSuballocationList::const_iterator suballocItem,
3842 bool canMakeOtherLost,
3843 VkDeviceSize* pOffset,
3844 size_t* itemsToMakeLostCount,
3845 VkDeviceSize* pSumFreeSize,
3846 VkDeviceSize* pSumItemSize)
const;
3848 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3852 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3855 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3858 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3862 class VmaDeviceMemoryMapping
3865 VmaDeviceMemoryMapping();
3866 ~VmaDeviceMemoryMapping();
3868 void* GetMappedData()
const {
return m_pMappedData; }
3871 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3872 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3876 uint32_t m_MapCount;
3877 void* m_pMappedData;
3886 class VmaDeviceMemoryBlock
3889 uint32_t m_MemoryTypeIndex;
3890 VkDeviceMemory m_hMemory;
3891 VmaDeviceMemoryMapping m_Mapping;
3892 VmaBlockMetadata m_Metadata;
3894 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3896 ~VmaDeviceMemoryBlock()
3898 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3903 uint32_t newMemoryTypeIndex,
3904 VkDeviceMemory newMemory,
3905 VkDeviceSize newSize);
3907 void Destroy(VmaAllocator allocator);
3910 bool Validate()
const;
3913 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3914 void Unmap(VmaAllocator hAllocator, uint32_t count);
3917 struct VmaPointerLess
3919 bool operator()(
const void* lhs,
const void* rhs)
const 3925 class VmaDefragmentator;
3933 struct VmaBlockVector
3936 VmaAllocator hAllocator,
3937 uint32_t memoryTypeIndex,
3938 VkDeviceSize preferredBlockSize,
3939 size_t minBlockCount,
3940 size_t maxBlockCount,
3941 VkDeviceSize bufferImageGranularity,
3942 uint32_t frameInUseCount,
3946 VkResult CreateMinBlocks();
3948 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3949 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3950 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3951 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3955 bool IsEmpty()
const {
return m_Blocks.empty(); }
3958 VmaPool hCurrentPool,
3959 uint32_t currentFrameIndex,
3960 const VkMemoryRequirements& vkMemReq,
3962 VmaSuballocationType suballocType,
3963 VmaAllocation* pAllocation);
3966 VmaAllocation hAllocation);
3971 #if VMA_STATS_STRING_ENABLED 3972 void PrintDetailedMap(
class VmaJsonWriter& json);
3975 void MakePoolAllocationsLost(
3976 uint32_t currentFrameIndex,
3977 size_t* pLostAllocationCount);
3979 VmaDefragmentator* EnsureDefragmentator(
3980 VmaAllocator hAllocator,
3981 uint32_t currentFrameIndex);
3983 VkResult Defragment(
3985 VkDeviceSize& maxBytesToMove,
3986 uint32_t& maxAllocationsToMove);
3988 void DestroyDefragmentator();
3991 friend class VmaDefragmentator;
3993 const VmaAllocator m_hAllocator;
3994 const uint32_t m_MemoryTypeIndex;
3995 const VkDeviceSize m_PreferredBlockSize;
3996 const size_t m_MinBlockCount;
3997 const size_t m_MaxBlockCount;
3998 const VkDeviceSize m_BufferImageGranularity;
3999 const uint32_t m_FrameInUseCount;
4000 const bool m_IsCustomPool;
4003 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
4007 bool m_HasEmptyBlock;
4008 VmaDefragmentator* m_pDefragmentator;
4010 size_t CalcMaxBlockSize()
const;
4013 void Remove(VmaDeviceMemoryBlock* pBlock);
4017 void IncrementallySortBlocks();
4019 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
4025 VmaBlockVector m_BlockVector;
4029 VmaAllocator hAllocator,
4033 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
4035 #if VMA_STATS_STRING_ENABLED 4040 class VmaDefragmentator
4042 const VmaAllocator m_hAllocator;
4043 VmaBlockVector*
const m_pBlockVector;
4044 uint32_t m_CurrentFrameIndex;
4045 VkDeviceSize m_BytesMoved;
4046 uint32_t m_AllocationsMoved;
4048 struct AllocationInfo
4050 VmaAllocation m_hAllocation;
4051 VkBool32* m_pChanged;
4054 m_hAllocation(VK_NULL_HANDLE),
4055 m_pChanged(VMA_NULL)
4060 struct AllocationInfoSizeGreater
4062 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4064 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4069 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4073 VmaDeviceMemoryBlock* m_pBlock;
4074 bool m_HasNonMovableAllocations;
4075 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4077 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4079 m_HasNonMovableAllocations(true),
4080 m_Allocations(pAllocationCallbacks),
4081 m_pMappedDataForDefragmentation(VMA_NULL)
4085 void CalcHasNonMovableAllocations()
4087 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4088 const size_t defragmentAllocCount = m_Allocations.size();
4089 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4092 void SortAllocationsBySizeDescecnding()
4094 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4097 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
4098 void Unmap(VmaAllocator hAllocator);
4102 void* m_pMappedDataForDefragmentation;
4105 struct BlockPointerLess
4107 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4109 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4111 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4113 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4119 struct BlockInfoCompareMoveDestination
4121 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4123 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4127 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4131 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4139 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4140 BlockInfoVector m_Blocks;
4142 VkResult DefragmentRound(
4143 VkDeviceSize maxBytesToMove,
4144 uint32_t maxAllocationsToMove);
4146 static bool MoveMakesSense(
4147 size_t dstBlockIndex, VkDeviceSize dstOffset,
4148 size_t srcBlockIndex, VkDeviceSize srcOffset);
4152 VmaAllocator hAllocator,
4153 VmaBlockVector* pBlockVector,
4154 uint32_t currentFrameIndex);
4156 ~VmaDefragmentator();
4158 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4159 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4161 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
4163 VkResult Defragment(
4164 VkDeviceSize maxBytesToMove,
4165 uint32_t maxAllocationsToMove);
4169 struct VmaAllocator_T
4172 bool m_UseKhrDedicatedAllocation;
4174 bool m_AllocationCallbacksSpecified;
4175 VkAllocationCallbacks m_AllocationCallbacks;
4179 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4180 VMA_MUTEX m_HeapSizeLimitMutex;
4182 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4183 VkPhysicalDeviceMemoryProperties m_MemProps;
4186 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4189 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4190 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4191 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4196 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4198 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4202 return m_VulkanFunctions;
4205 VkDeviceSize GetBufferImageGranularity()
const 4208 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4209 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4212 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4213 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4215 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4217 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4218 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4221 void GetBufferMemoryRequirements(
4223 VkMemoryRequirements& memReq,
4224 bool& requiresDedicatedAllocation,
4225 bool& prefersDedicatedAllocation)
const;
4226 void GetImageMemoryRequirements(
4228 VkMemoryRequirements& memReq,
4229 bool& requiresDedicatedAllocation,
4230 bool& prefersDedicatedAllocation)
const;
4233 VkResult AllocateMemory(
4234 const VkMemoryRequirements& vkMemReq,
4235 bool requiresDedicatedAllocation,
4236 bool prefersDedicatedAllocation,
4237 VkBuffer dedicatedBuffer,
4238 VkImage dedicatedImage,
4240 VmaSuballocationType suballocType,
4241 VmaAllocation* pAllocation);
4244 void FreeMemory(
const VmaAllocation allocation);
4246 void CalculateStats(
VmaStats* pStats);
4248 #if VMA_STATS_STRING_ENABLED 4249 void PrintDetailedMap(
class VmaJsonWriter& json);
4252 VkResult Defragment(
4253 VmaAllocation* pAllocations,
4254 size_t allocationCount,
4255 VkBool32* pAllocationsChanged,
4259 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4260 bool TouchAllocation(VmaAllocation hAllocation);
4263 void DestroyPool(VmaPool pool);
4264 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4266 void SetCurrentFrameIndex(uint32_t frameIndex);
4268 void MakePoolAllocationsLost(
4270 size_t* pLostAllocationCount);
4272 void CreateLostAllocation(VmaAllocation* pAllocation);
4274 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4275 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4277 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4278 void Unmap(VmaAllocation hAllocation);
4281 VkDeviceSize m_PreferredLargeHeapBlockSize;
4283 VkPhysicalDevice m_PhysicalDevice;
4284 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4286 VMA_MUTEX m_PoolsMutex;
4288 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4294 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4296 VkResult AllocateMemoryOfType(
4297 const VkMemoryRequirements& vkMemReq,
4298 bool dedicatedAllocation,
4299 VkBuffer dedicatedBuffer,
4300 VkImage dedicatedImage,
4302 uint32_t memTypeIndex,
4303 VmaSuballocationType suballocType,
4304 VmaAllocation* pAllocation);
4307 VkResult AllocateDedicatedMemory(
4309 VmaSuballocationType suballocType,
4310 uint32_t memTypeIndex,
4312 bool isUserDataString,
4314 VkBuffer dedicatedBuffer,
4315 VkImage dedicatedImage,
4316 VmaAllocation* pAllocation);
4319 void FreeDedicatedMemory(VmaAllocation allocation);
4325 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4327 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4330 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4332 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4335 template<
typename T>
4336 static T* VmaAllocate(VmaAllocator hAllocator)
4338 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4341 template<
typename T>
4342 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4344 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4347 template<
typename T>
4348 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4353 VmaFree(hAllocator, ptr);
4357 template<
typename T>
4358 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4362 for(
size_t i = count; i--; )
4364 VmaFree(hAllocator, ptr);
4371 #if VMA_STATS_STRING_ENABLED 4373 class VmaStringBuilder
4376 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4377 size_t GetLength()
const {
return m_Data.size(); }
4378 const char* GetData()
const {
return m_Data.data(); }
4380 void Add(
char ch) { m_Data.push_back(ch); }
4381 void Add(
const char* pStr);
4382 void AddNewLine() { Add(
'\n'); }
4383 void AddNumber(uint32_t num);
4384 void AddNumber(uint64_t num);
4385 void AddPointer(
const void* ptr);
4388 VmaVector< char, VmaStlAllocator<char> > m_Data;
4391 void VmaStringBuilder::Add(
const char* pStr)
4393 const size_t strLen = strlen(pStr);
4396 const size_t oldCount = m_Data.size();
4397 m_Data.resize(oldCount + strLen);
4398 memcpy(m_Data.data() + oldCount, pStr, strLen);
4402 void VmaStringBuilder::AddNumber(uint32_t num)
4405 VmaUint32ToStr(buf,
sizeof(buf), num);
4409 void VmaStringBuilder::AddNumber(uint64_t num)
4412 VmaUint64ToStr(buf,
sizeof(buf), num);
4416 void VmaStringBuilder::AddPointer(
const void* ptr)
4419 VmaPtrToStr(buf,
sizeof(buf), ptr);
4423 #endif // #if VMA_STATS_STRING_ENABLED 4428 #if VMA_STATS_STRING_ENABLED 4433 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4436 void BeginObject(
bool singleLine =
false);
4439 void BeginArray(
bool singleLine =
false);
4442 void WriteString(
const char* pStr);
4443 void BeginString(
const char* pStr = VMA_NULL);
4444 void ContinueString(
const char* pStr);
4445 void ContinueString(uint32_t n);
4446 void ContinueString(uint64_t n);
4447 void ContinueString_Pointer(
const void* ptr);
4448 void EndString(
const char* pStr = VMA_NULL);
4450 void WriteNumber(uint32_t n);
4451 void WriteNumber(uint64_t n);
4452 void WriteBool(
bool b);
4456 static const char*
const INDENT;
4458 enum COLLECTION_TYPE
4460 COLLECTION_TYPE_OBJECT,
4461 COLLECTION_TYPE_ARRAY,
4465 COLLECTION_TYPE type;
4466 uint32_t valueCount;
4467 bool singleLineMode;
4470 VmaStringBuilder& m_SB;
4471 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4472 bool m_InsideString;
4474 void BeginValue(
bool isString);
4475 void WriteIndent(
bool oneLess =
false);
4478 const char*
const VmaJsonWriter::INDENT =
" ";
4480 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4482 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4483 m_InsideString(false)
4487 VmaJsonWriter::~VmaJsonWriter()
4489 VMA_ASSERT(!m_InsideString);
4490 VMA_ASSERT(m_Stack.empty());
4493 void VmaJsonWriter::BeginObject(
bool singleLine)
4495 VMA_ASSERT(!m_InsideString);
4501 item.type = COLLECTION_TYPE_OBJECT;
4502 item.valueCount = 0;
4503 item.singleLineMode = singleLine;
4504 m_Stack.push_back(item);
4507 void VmaJsonWriter::EndObject()
4509 VMA_ASSERT(!m_InsideString);
4514 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4518 void VmaJsonWriter::BeginArray(
bool singleLine)
4520 VMA_ASSERT(!m_InsideString);
4526 item.type = COLLECTION_TYPE_ARRAY;
4527 item.valueCount = 0;
4528 item.singleLineMode = singleLine;
4529 m_Stack.push_back(item);
4532 void VmaJsonWriter::EndArray()
4534 VMA_ASSERT(!m_InsideString);
4539 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4543 void VmaJsonWriter::WriteString(
const char* pStr)
4549 void VmaJsonWriter::BeginString(
const char* pStr)
4551 VMA_ASSERT(!m_InsideString);
4555 m_InsideString =
true;
4556 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4558 ContinueString(pStr);
// Appends pStr to the string value currently being written, escaping
// characters as required for JSON output.
// NOTE(review): the per-character escape switch from the original file is
// missing from this extraction; only the loop header and the fallback
// assert for unsupported characters survive below — restore from upstream.
4562 void VmaJsonWriter::ContinueString(
const char* pStr)
// Only legal between BeginString() and EndString().
4564 VMA_ASSERT(m_InsideString);
4566 const size_t strLen = strlen(pStr);
4567 for(
size_t i = 0; i < strLen; ++i)
// Fallback for characters the escaper does not handle.
4600 VMA_ASSERT(0 &&
"Character not currently supported.");
4606 void VmaJsonWriter::ContinueString(uint32_t n)
4608 VMA_ASSERT(m_InsideString);
4612 void VmaJsonWriter::ContinueString(uint64_t n)
4614 VMA_ASSERT(m_InsideString);
4618 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4620 VMA_ASSERT(m_InsideString);
4621 m_SB.AddPointer(ptr);
4624 void VmaJsonWriter::EndString(
const char* pStr)
4626 VMA_ASSERT(m_InsideString);
4627 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4629 ContinueString(pStr);
4632 m_InsideString =
false;
4635 void VmaJsonWriter::WriteNumber(uint32_t n)
4637 VMA_ASSERT(!m_InsideString);
4642 void VmaJsonWriter::WriteNumber(uint64_t n)
4644 VMA_ASSERT(!m_InsideString);
4649 void VmaJsonWriter::WriteBool(
bool b)
4651 VMA_ASSERT(!m_InsideString);
4653 m_SB.Add(b ?
"true" :
"false");
4656 void VmaJsonWriter::WriteNull()
4658 VMA_ASSERT(!m_InsideString);
4663 void VmaJsonWriter::BeginValue(
bool isString)
4665 if(!m_Stack.empty())
4667 StackItem& currItem = m_Stack.back();
4668 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4669 currItem.valueCount % 2 == 0)
4671 VMA_ASSERT(isString);
4674 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4675 currItem.valueCount % 2 != 0)
4679 else if(currItem.valueCount > 0)
4688 ++currItem.valueCount;
4692 void VmaJsonWriter::WriteIndent(
bool oneLess)
4694 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4698 size_t count = m_Stack.size();
4699 if(count > 0 && oneLess)
4703 for(
size_t i = 0; i < count; ++i)
4710 #endif // #if VMA_STATS_STRING_ENABLED 4714 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4716 if(IsUserDataString())
4718 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4720 FreeUserDataString(hAllocator);
4722 if(pUserData != VMA_NULL)
4724 const char*
const newStrSrc = (
char*)pUserData;
4725 const size_t newStrLen = strlen(newStrSrc);
4726 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4727 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4728 m_pUserData = newStrDst;
4733 m_pUserData = pUserData;
4737 void VmaAllocation_T::ChangeBlockAllocation(
4738 VmaAllocator hAllocator,
4739 VmaDeviceMemoryBlock* block,
4740 VkDeviceSize offset)
4742 VMA_ASSERT(block != VMA_NULL);
4743 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4746 if(block != m_BlockAllocation.m_Block)
4748 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4749 if(IsPersistentMap())
4751 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4752 block->Map(hAllocator, mapRefCount, VMA_NULL);
4755 m_BlockAllocation.m_Block = block;
4756 m_BlockAllocation.m_Offset = offset;
4759 VkDeviceSize VmaAllocation_T::GetOffset()
const 4763 case ALLOCATION_TYPE_BLOCK:
4764 return m_BlockAllocation.m_Offset;
4765 case ALLOCATION_TYPE_DEDICATED:
4773 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4777 case ALLOCATION_TYPE_BLOCK:
4778 return m_BlockAllocation.m_Block->m_hMemory;
4779 case ALLOCATION_TYPE_DEDICATED:
4780 return m_DedicatedAllocation.m_hMemory;
4783 return VK_NULL_HANDLE;
4787 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4791 case ALLOCATION_TYPE_BLOCK:
4792 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4793 case ALLOCATION_TYPE_DEDICATED:
4794 return m_DedicatedAllocation.m_MemoryTypeIndex;
4801 void* VmaAllocation_T::GetMappedData()
const 4805 case ALLOCATION_TYPE_BLOCK:
4808 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4809 VMA_ASSERT(pBlockData != VMA_NULL);
4810 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4817 case ALLOCATION_TYPE_DEDICATED:
4818 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4819 return m_DedicatedAllocation.m_pMappedData;
4826 bool VmaAllocation_T::CanBecomeLost()
const 4830 case ALLOCATION_TYPE_BLOCK:
4831 return m_BlockAllocation.m_CanBecomeLost;
4832 case ALLOCATION_TYPE_DEDICATED:
4840 VmaPool VmaAllocation_T::GetPool()
const 4842 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4843 return m_BlockAllocation.m_hPool;
4846 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4848 VMA_ASSERT(CanBecomeLost());
4854 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4857 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4862 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4868 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4878 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4880 VMA_ASSERT(IsUserDataString());
4881 if(m_pUserData != VMA_NULL)
4883 char*
const oldStr = (
char*)m_pUserData;
4884 const size_t oldStrLen = strlen(oldStr);
4885 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4886 m_pUserData = VMA_NULL;
4890 void VmaAllocation_T::BlockAllocMap()
4892 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4894 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4900 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4904 void VmaAllocation_T::BlockAllocUnmap()
4906 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4908 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4914 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4918 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4920 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4924 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4926 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4927 *ppData = m_DedicatedAllocation.m_pMappedData;
4933 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4934 return VK_ERROR_MEMORY_MAP_FAILED;
4939 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4940 hAllocator->m_hDevice,
4941 m_DedicatedAllocation.m_hMemory,
4946 if(result == VK_SUCCESS)
4948 m_DedicatedAllocation.m_pMappedData = *ppData;
4955 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4957 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4959 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4964 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4965 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4966 hAllocator->m_hDevice,
4967 m_DedicatedAllocation.m_hMemory);
4972 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4976 #if VMA_STATS_STRING_ENABLED 4979 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4988 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4992 json.WriteString(
"Blocks");
4995 json.WriteString(
"Allocations");
4998 json.WriteString(
"UnusedRanges");
5001 json.WriteString(
"UsedBytes");
5004 json.WriteString(
"UnusedBytes");
5009 json.WriteString(
"AllocationSize");
5010 json.BeginObject(
true);
5011 json.WriteString(
"Min");
5013 json.WriteString(
"Avg");
5015 json.WriteString(
"Max");
5022 json.WriteString(
"UnusedRangeSize");
5023 json.BeginObject(
true);
5024 json.WriteString(
"Min");
5026 json.WriteString(
"Avg");
5028 json.WriteString(
"Max");
5036 #endif // #if VMA_STATS_STRING_ENABLED 5038 struct VmaSuballocationItemSizeLess
5041 const VmaSuballocationList::iterator lhs,
5042 const VmaSuballocationList::iterator rhs)
const 5044 return lhs->size < rhs->size;
5047 const VmaSuballocationList::iterator lhs,
5048 VkDeviceSize rhsSize)
const 5050 return lhs->size < rhsSize;
5057 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
5061 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5062 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5066 VmaBlockMetadata::~VmaBlockMetadata()
5070 void VmaBlockMetadata::Init(VkDeviceSize size)
5074 m_SumFreeSize = size;
5076 VmaSuballocation suballoc = {};
5077 suballoc.offset = 0;
5078 suballoc.size = size;
5079 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5080 suballoc.hAllocation = VK_NULL_HANDLE;
5082 m_Suballocations.push_back(suballoc);
5083 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5085 m_FreeSuballocationsBySize.push_back(suballocItem);
// Consistency check over the whole suballocation list and the by-size free
// index. Walks every suballocation, recomputing offset, free count, and
// total free size, and cross-checks them against the cached members.
// NOTE(review): several interior lines (early returns, loop increment,
// braces) were dropped by this extraction; the surviving statements below
// are kept verbatim.
5088 bool VmaBlockMetadata::Validate()
// An initialized block always holds at least one suballocation.
const 5090 if(m_Suballocations.empty())
// Expected sum of sizes of all suballocations.
5096 VkDeviceSize calculatedOffset = 0;
// Expected number of free suballocations.
5098 uint32_t calculatedFreeCount = 0;
// Expected sum of sizes of free suballocations.
5100 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges large enough to appear in m_FreeSuballocationsBySize.
5103 size_t freeSuballocationsToRegister = 0;
// Two adjacent free ranges should have been merged.
5105 bool prevFree =
false;
5107 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5108 suballocItem != m_Suballocations.cend();
5111 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous: each starts where the previous ended.
5114 if(subAlloc.offset != calculatedOffset)
5119 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two consecutive free ranges indicate a missed merge.
5121 if(prevFree && currFree)
// Free ranges carry no allocation handle; used ranges must carry one.
5126 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5133 calculatedSumFreeSize += subAlloc.size;
5134 ++calculatedFreeCount;
// Only sufficiently large free ranges are indexed by size.
5135 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5137 ++freeSuballocationsToRegister;
// Used range: the owning allocation must agree on offset and size.
5142 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5146 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5152 calculatedOffset += subAlloc.size;
5153 prevFree = currFree;
// The by-size index must hold exactly the registered free ranges...
5158 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// ...and must be sorted ascending by size, containing only free ranges.
5163 VkDeviceSize lastSize = 0;
5164 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5166 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5169 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5174 if(suballocItem->size < lastSize)
5179 lastSize = suballocItem->size;
// Final cross-check of recomputed totals against cached members.
5183 if(!ValidateFreeSuballocationList() ||
5184 (calculatedOffset != m_Size) ||
5185 (calculatedSumFreeSize != m_SumFreeSize) ||
5186 (calculatedFreeCount != m_FreeCount))
5194 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5196 if(!m_FreeSuballocationsBySize.empty())
5198 return m_FreeSuballocationsBySize.back()->size;
5206 bool VmaBlockMetadata::IsEmpty()
const 5208 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills outInfo with statistics for this block: one allocation entry per taken
// suballocation, one unused-range entry per free suballocation.
// NOTE(review): extraction is lossy here - the initialization of outInfo and the
// min/max/sum accumulation statements are missing from this text; see upstream.
5211 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5215 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5227 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5228 suballocItem != m_Suballocations.cend();
5231 const VmaSuballocation& suballoc = *suballocItem;
// Taken suballocations count as allocations; free ones as unused ranges.
5232 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into inoutStats (pool-level statistics).
// NOTE(review): lossy extraction - updates of allocation/unused-range counters
// and unusedSize are missing from this text; see upstream.
5245 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5247 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
// The whole block size contributes to the pool's total size.
5249 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON: aggregate counters followed by an
// array of all suballocations (type, size, offset, optional user data).
// Compiled only when VMA_STATS_STRING_ENABLED.
5256 #if VMA_STATS_STRING_ENABLED 5258 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5262 json.WriteString(
"TotalBytes");
5263 json.WriteNumber(m_Size);
5265 json.WriteString(
"UnusedBytes");
5266 json.WriteNumber(m_SumFreeSize);
5268 json.WriteString(
// Live allocations = total suballocations minus free ones.
"Allocations");
5269 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5271 json.WriteString(
"UnusedRanges");
5272 json.WriteNumber(m_FreeCount);
5274 json.WriteString(
"Suballocations");
5277 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5278 suballocItem != m_Suballocations.cend();
5279 ++suballocItem, ++i)
5281 json.BeginObject(
true);
5283 json.WriteString(
"Type");
5284 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5286 json.WriteString(
"Size");
5287 json.WriteNumber(suballocItem->size);
5289 json.WriteString(
"Offset");
5290 json.WriteNumber(suballocItem->offset);
// Taken suballocations may carry user data: either a string or a raw pointer.
5292 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5294 const void* pUserData = suballocItem->hAllocation->GetUserData();
5295 if(pUserData != VMA_NULL)
5297 json.WriteString(
"UserData");
5298 if(suballocItem->hAllocation->IsUserDataString())
5300 json.WriteString((
const char*)pUserData);
// Non-string user data is rendered as a pointer value.
5305 json.ContinueString_Pointer(pUserData);
// Builds an allocation request covering the beginning of an EMPTY block: the
// block's single free suballocation starts at offset 0 and nothing needs to be
// made lost.
5318 #endif // #if VMA_STATS_STRING_ENABLED 5330 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5332 VMA_ASSERT(IsEmpty());
5333 pAllocationRequest->offset = 0;
5334 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5335 pAllocationRequest->sumItemSize = 0;
// The single free suballocation spanning the whole block.
5336 pAllocationRequest->item = m_Suballocations.begin();
5337 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for a new allocation of allocSize/allocAlignment inside
// this block. Three strategies, in order:
//  1. Best fit: binary-search m_FreeSuballocationsBySize for the smallest free
//     range that fits, then scan forward.
//  2. Worst fit (other branch): scan the by-size vector backwards.
//  3. If canMakeOtherLost: scan ALL suballocations and consider evicting
//     ("making lost") existing allocations, keeping the cheapest request found.
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): lossy extraction - several CheckAllocation() argument lines and
// control-flow braces are missing from this text; see upstream.
5340 bool VmaBlockMetadata::CreateAllocationRequest(
5341 uint32_t currentFrameIndex,
5342 uint32_t frameInUseCount,
5343 VkDeviceSize bufferImageGranularity,
5344 VkDeviceSize allocSize,
5345 VkDeviceSize allocAlignment,
5346 VmaSuballocationType allocType,
5347 bool canMakeOtherLost,
5348 VmaAllocationRequest* pAllocationRequest)
5350 VMA_ASSERT(allocSize > 0);
5351 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5352 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5353 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space is a hard upper bound.
5356 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5362 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5363 if(freeSuballocCount > 0)
// Best fit: first free range whose size is >= allocSize.
5368 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5369 m_FreeSuballocationsBySize.data(),
5370 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5372 VmaSuballocationItemSizeLess());
5373 size_t index = it - m_FreeSuballocationsBySize.data();
5374 for(; index < freeSuballocCount; ++index)
5379 bufferImageGranularity,
5383 m_FreeSuballocationsBySize[index],
5385 &pAllocationRequest->offset,
5386 &pAllocationRequest->itemsToMakeLostCount,
5387 &pAllocationRequest->sumFreeSize,
5388 &pAllocationRequest->sumItemSize))
5390 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst fit: try the largest free ranges first, scanning backwards.
5398 for(
size_t index = freeSuballocCount; index--; )
5403 bufferImageGranularity,
5407 m_FreeSuballocationsBySize[index],
5409 &pAllocationRequest->offset,
5410 &pAllocationRequest->itemsToMakeLostCount,
5411 &pAllocationRequest->sumFreeSize,
5412 &pAllocationRequest->sumItemSize))
5414 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5421 if(canMakeOtherLost)
// Brute-force: keep the cheapest candidate found; VK_WHOLE_SIZE marks "none yet".
5425 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5426 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5428 VmaAllocationRequest tmpAllocRequest = {};
5429 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5430 suballocIt != m_Suballocations.end();
// Candidate start positions: free ranges or allocations that can become lost.
5433 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5434 suballocIt->hAllocation->CanBecomeLost())
5439 bufferImageGranularity,
5445 &tmpAllocRequest.offset,
5446 &tmpAllocRequest.itemsToMakeLostCount,
5447 &tmpAllocRequest.sumFreeSize,
5448 &tmpAllocRequest.sumItemSize))
5450 tmpAllocRequest.item = suballocIt;
5452 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5454 *pAllocationRequest = tmpAllocRequest;
// Found something iff sumItemSize was updated from its sentinel value.
5460 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts ("makes lost") the allocations counted in the request so its range
// becomes free. Walks forward from request->item, skipping already-free
// suballocations, until itemsToMakeLostCount reaches zero.
// NOTE(review): lossy extraction - the `else` branch returning false when
// MakeLost() fails, and the final `return true;`, are missing from this text.
5469 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5470 uint32_t currentFrameIndex,
5471 uint32_t frameInUseCount,
5472 VmaAllocationRequest* pAllocationRequest)
5474 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Free suballocations need no eviction - advance past them.
5476 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5478 ++pAllocationRequest->item;
5480 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5481 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5482 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5483 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the surviving iterator.
5485 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5486 --pAllocationRequest->itemsToMakeLostCount;
5494 VMA_HEAVY_ASSERT(Validate());
5495 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5496 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5501 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5503 uint32_t lostAllocationCount = 0;
5504 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5505 it != m_Suballocations.end();
5508 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5509 it->hAllocation->CanBecomeLost() &&
5510 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5512 it = FreeSuballocation(it);
5513 ++lostAllocationCount;
5516 return lostAllocationCount;
// Commits a previously computed allocation request: converts the chosen free
// suballocation into a taken one of exactly allocSize, splitting off free
// padding suballocations before and/or after as needed, and updates the cached
// counters (m_FreeCount, m_SumFreeSize).
// NOTE(review): lossy extraction - the `if(paddingEnd)` / `if(paddingBegin)`
// guards around the two split branches and some counter updates are missing
// from this text; see upstream.
5519 void VmaBlockMetadata::Alloc(
5520 const VmaAllocationRequest& request,
5521 VmaSuballocationType type,
5522 VkDeviceSize allocSize,
5523 VmaAllocation hAllocation)
5525 VMA_ASSERT(request.item != m_Suballocations.end());
5526 VmaSuballocation& suballoc = *request.item;
5528 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5530 VMA_ASSERT(request.offset >= suballoc.offset);
// Unused space before the aligned offset and after the allocation's end.
5531 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5532 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5533 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range stops being free - remove it from the by-size registry first.
5537 UnregisterFreeSuballocation(request.item);
5539 suballoc.offset = request.offset;
5540 suballoc.size = allocSize;
5541 suballoc.type = type;
5542 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after the item.
5547 VmaSuballocation paddingSuballoc = {};
5548 paddingSuballoc.offset = request.offset + allocSize;
5549 paddingSuballoc.size = paddingEnd;
5550 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5551 VmaSuballocationList::iterator next = request.item;
5553 const VmaSuballocationList::iterator paddingEndItem =
5554 m_Suballocations.insert(next, paddingSuballoc);
5555 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before the item.
5561 VmaSuballocation paddingSuballoc = {};
5562 paddingSuballoc.offset = request.offset - paddingBegin;
5563 paddingSuballoc.size = paddingBegin;
5564 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5565 const VmaSuballocationList::iterator paddingBeginItem =
5566 m_Suballocations.insert(request.item, paddingSuballoc);
5567 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; each padding split re-adds one.
5571 m_FreeCount = m_FreeCount - 1;
5572 if(paddingBegin > 0)
5580 m_SumFreeSize -= allocSize;
5583 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5585 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5586 suballocItem != m_Suballocations.end();
5589 VmaSuballocation& suballoc = *suballocItem;
5590 if(suballoc.hAllocation == allocation)
5592 FreeSuballocation(suballocItem);
5593 VMA_HEAVY_ASSERT(Validate());
5597 VMA_ASSERT(0 &&
"Not found!");
5600 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5602 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5603 suballocItem != m_Suballocations.end();
5606 VmaSuballocation& suballoc = *suballocItem;
5607 if(suballoc.offset == offset)
5609 FreeSuballocation(suballocItem);
5613 VMA_ASSERT(0 &&
"Not found!");
5616 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5618 VkDeviceSize lastSize = 0;
5619 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5621 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5623 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5628 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5633 if(it->size < lastSize)
5639 lastSize = it->size;
// Core placement test: decides whether an allocation of allocSize with
// allocAlignment can start at (or after) suballocItem. Outputs the final
// aligned *pOffset and, when canMakeOtherLost, the number of existing
// allocations that would have to be evicted plus the free/item size sums used
// for cost comparison. Two largely parallel paths:
//  - canMakeOtherLost == true: the candidate range may span several
//    suballocations (free or evictable).
//  - canMakeOtherLost == false: the candidate must fit inside the single free
//    suballocation.
// Both paths apply VMA_DEBUG_MARGIN, alignment, and bufferImageGranularity
// conflict checks against the previous and next suballocations on the same
// "page".
// NOTE(review): lossy extraction throughout - `return false;`/`return true;`
// statements, iterator increments (++/--), and several braces are missing from
// this text; compare with upstream before editing.
5644 bool VmaBlockMetadata::CheckAllocation(
5645 uint32_t currentFrameIndex,
5646 uint32_t frameInUseCount,
5647 VkDeviceSize bufferImageGranularity,
5648 VkDeviceSize allocSize,
5649 VkDeviceSize allocAlignment,
5650 VmaSuballocationType allocType,
5651 VmaSuballocationList::const_iterator suballocItem,
5652 bool canMakeOtherLost,
5653 VkDeviceSize* pOffset,
5654 size_t* itemsToMakeLostCount,
5655 VkDeviceSize* pSumFreeSize,
5656 VkDeviceSize* pSumItemSize)
const 5658 VMA_ASSERT(allocSize > 0);
5659 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5660 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5661 VMA_ASSERT(pOffset != VMA_NULL);
5663 *itemsToMakeLostCount = 0;
// ---- Path 1: eviction allowed ----
5667 if(canMakeOtherLost)
5669 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5671 *pSumFreeSize = suballocItem->size;
// A taken start item is usable only if it can be made lost and is stale.
5675 if(suballocItem->hAllocation->CanBecomeLost() &&
5676 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5678 ++*itemsToMakeLostCount;
5679 *pSumItemSize = suballocItem->size;
// Not even the remainder of the whole block would fit.
5688 if(m_Size - suballocItem->offset < allocSize)
5694 *pOffset = suballocItem->offset;
// Reserve debug margin before the allocation (except at block start).
5697 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5699 *pOffset += VMA_DEBUG_MARGIN;
5703 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5704 *pOffset = VmaAlignUp(*pOffset, alignment);
// Buffer/image granularity: bump alignment if a conflicting resource type
// precedes us on the same granularity page.
5708 if(bufferImageGranularity > 1)
5710 bool bufferImageGranularityConflict =
false;
5711 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5712 while(prevSuballocItem != m_Suballocations.cbegin())
5715 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5716 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5718 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5720 bufferImageGranularityConflict =
true;
5728 if(bufferImageGranularityConflict)
5730 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past the end of the starting suballocation.
5736 if(*pOffset >= suballocItem->offset + suballocItem->size)
5742 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5745 VmaSuballocationList::const_iterator next = suballocItem;
5747 const VkDeviceSize requiredEndMargin =
5748 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5750 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5752 if(suballocItem->offset + totalSize > m_Size)
// Consume following suballocations until totalSize is covered, counting
// evictions and accumulating cost sums along the way.
5759 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5760 if(totalSize > suballocItem->size)
5762 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5763 while(remainingSize > 0)
5766 if(lastSuballocItem == m_Suballocations.cend())
5770 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5772 *pSumFreeSize += lastSuballocItem->size;
5776 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5777 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5778 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5780 ++*itemsToMakeLostCount;
5781 *pSumItemSize += lastSuballocItem->size;
5788 remainingSize = (lastSuballocItem->size < remainingSize) ?
5789 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: stale conflicting
// neighbors on the same page must also be made lost.
5795 if(bufferImageGranularity > 1)
5797 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5799 while(nextSuballocItem != m_Suballocations.cend())
5801 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5802 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5804 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5806 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5807 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5808 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5810 ++*itemsToMakeLostCount;
// ---- Path 2: no eviction - must fit inside this single free suballocation ----
5829 const VmaSuballocation& suballoc = *suballocItem;
5830 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5832 *pSumFreeSize = suballoc.size;
5835 if(suballoc.size < allocSize)
5841 *pOffset = suballoc.offset;
5844 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5846 *pOffset += VMA_DEBUG_MARGIN;
5850 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5851 *pOffset = VmaAlignUp(*pOffset, alignment);
5855 if(bufferImageGranularity > 1)
5857 bool bufferImageGranularityConflict =
false;
5858 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5859 while(prevSuballocItem != m_Suballocations.cbegin())
5862 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5863 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5865 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5867 bufferImageGranularityConflict =
true;
5875 if(bufferImageGranularityConflict)
5877 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5882 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5885 VmaSuballocationList::const_iterator next = suballocItem;
5887 const VkDeviceSize requiredEndMargin =
5888 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free range.
5891 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting (non-evictable) neighbor on the same page makes this spot unusable.
5898 if(bufferImageGranularity > 1)
5900 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5902 while(nextSuballocItem != m_Suballocations.cend())
5904 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5905 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5907 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5926 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5928 VMA_ASSERT(item != m_Suballocations.end());
5929 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5931 VmaSuballocationList::iterator nextItem = item;
5933 VMA_ASSERT(nextItem != m_Suballocations.end());
5934 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5936 item->size += nextItem->size;
5938 m_Suballocations.erase(nextItem);
// Turns a taken suballocation into a free one, merges it with adjacent free
// neighbors, updates the cached sums, and (re)registers the surviving free
// range in m_FreeSuballocationsBySize. Returns an iterator to the surviving
// free suballocation.
// NOTE(review): lossy extraction - `++nextItem`/`--prevItem` iterator steps,
// the ++m_FreeCount update, the merge `if` guards, and the final return in the
// merge-with-prev branch are missing from this text; see upstream.
5941 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5944 VmaSuballocation& suballoc = *suballocItem;
5945 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5946 suballoc.hAllocation = VK_NULL_HANDLE;
5950 m_SumFreeSize += suballoc.size;
// Determine whether the following / preceding suballocations are also free.
5953 bool mergeWithNext =
false;
5954 bool mergeWithPrev =
false;
5956 VmaSuballocationList::iterator nextItem = suballocItem;
5958 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5960 mergeWithNext =
true;
5963 VmaSuballocationList::iterator prevItem = suballocItem;
5964 if(suballocItem != m_Suballocations.begin())
5967 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5969 mergeWithPrev =
true;
// Neighbors leave the by-size registry before merging changes their size.
5975 UnregisterFreeSuballocation(nextItem);
5976 MergeFreeWithNext(suballocItem);
5981 UnregisterFreeSuballocation(prevItem);
5982 MergeFreeWithNext(prevItem);
5983 RegisterFreeSuballocation(prevItem);
// No merge with previous: register the freed item itself.
5988 RegisterFreeSuballocation(suballocItem);
5989 return suballocItem;
5993 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5995 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5996 VMA_ASSERT(item->size > 0);
6000 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6002 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6004 if(m_FreeSuballocationsBySize.empty())
6006 m_FreeSuballocationsBySize.push_back(item);
6010 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Binary-searches
// to the first entry of equal size, then scans forward through the equal-size
// run to find the exact iterator. Small unregistered ranges (below the
// threshold) are not in the vector and need no removal.
// NOTE(review): lossy extraction - the search key argument to
// VmaBinaryFindFirstNotLess, a `return` after VmaVectorRemove, and some braces
// are missing from this text; see upstream.
6018 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6020 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6021 VMA_ASSERT(item->size > 0);
6025 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6027 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Jump to the first entry whose size is not less than item's size.
6029 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6030 m_FreeSuballocationsBySize.data(),
6031 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6033 VmaSuballocationItemSizeLess());
6034 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6035 index < m_FreeSuballocationsBySize.size();
6038 if(m_FreeSuballocationsBySize[index] == item)
6040 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item is absent - a bug.
6043 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6045 VMA_ASSERT(0 &&
"Not found.");
// Starts with no mapped pointer (and, per the destructor's assertion, a map
// count of zero).
// NOTE(review): the m_MapCount(0) initializer appears to have been lost in
// extraction; see upstream.
6054 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
6056 m_pMappedData(VMA_NULL)
// The mapping must be fully released (map count back to zero) before the
// owning block is destroyed - a non-zero count means a missing Unmap().
6060 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
6062 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory: if already mapped, bumps the
// count and returns the cached pointer; otherwise calls vkMapMemory once and
// caches the result. Thread-safe via m_Mutex when the allocator uses mutexes.
// NOTE(review): lossy extraction - the count==0 early-out, the
// `if(m_MapCount != 0)` guard, the remaining vkMapMemory arguments, and the
// m_MapCount = count assignment on first map are missing from this text.
6065 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
6072 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just bump the reference count and hand out the cached pointer.
6075 m_MapCount += count;
6076 VMA_ASSERT(m_pMappedData != VMA_NULL);
6077 if(ppData != VMA_NULL)
6079 *ppData = m_pMappedData;
// First map: call into Vulkan.
6085 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6086 hAllocator->m_hDevice,
6092 if(result == VK_SUCCESS)
6094 if(ppData != VMA_NULL)
6096 *ppData = m_pMappedData;
// Reference-counted unmap: decrements the map count by `count` and calls
// vkUnmapMemory only when it reaches zero. Asserts on unbalanced Unmap calls.
// NOTE(review): lossy extraction - the count==0 early-out and the
// `if(m_MapCount == 0)` guard around the actual unmap are missing from this text.
6104 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
6111 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6112 if(m_MapCount >= count)
6114 m_MapCount -= count;
// Last reference released: drop the cached pointer and unmap for real.
6117 m_pMappedData = VMA_NULL;
6118 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
6123 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructs an uninitialized block: no memory type, no VkDeviceMemory handle.
// Real initialization happens in Init().
6130 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
6131 m_MemoryTypeIndex(UINT32_MAX),
6132 m_hMemory(VK_NULL_HANDLE),
6133 m_Metadata(hAllocator)
6137 void VmaDeviceMemoryBlock::Init(
6138 uint32_t newMemoryTypeIndex,
6139 VkDeviceMemory newMemory,
6140 VkDeviceSize newSize)
6142 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6144 m_MemoryTypeIndex = newMemoryTypeIndex;
6145 m_hMemory = newMemory;
6147 m_Metadata.Init(newSize);
// Returns this block's VkDeviceMemory to the allocator. The block must be
// empty - destroying it with live allocations is a caller bug.
6150 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
6154 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6156 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6157 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
// Clear the handle so a later double-Destroy trips the assert above.
6158 m_hMemory = VK_NULL_HANDLE;
6161 bool VmaDeviceMemoryBlock::Validate()
const 6163 if((m_hMemory == VK_NULL_HANDLE) ||
6164 (m_Metadata.GetSize() == 0))
6169 return m_Metadata.Validate();
6172 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
6174 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
6177 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
6179 m_Mapping.Unmap(hAllocator, m_hMemory, count);
// Statistics helper fragments.
// NOTE(review): extraction lost the signature of the first helper (it
// zero-initializes a VmaStatInfo, presumably VmaInitStatInfo) and the whole
// body of VmaPostprocessCalcStatInfo; restore from upstream.
6184 memset(&outInfo, 0,
sizeof(outInfo));
6203 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool creation parameters into the
// embedded VmaBlockVector (member initializer, mostly lost in extraction).
// NOTE(review): lossy - the m_BlockVector( ... ) wrapper, maxBlockCount
// handling, bufferImageGranularity argument, and the destructor body are
// missing from this text; see upstream.
6211 VmaPool_T::VmaPool_T(
6212 VmaAllocator hAllocator,
6216 createInfo.memoryTypeIndex,
6217 createInfo.blockSize,
6218 createInfo.minBlockCount,
6219 createInfo.maxBlockCount,
6221 createInfo.frameInUseCount,
6226 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the configuration for a sequence of
// VkDeviceMemory blocks of one memory type (preferred block size, min/max
// block counts, granularity, frame-in-use count, custom-pool flag) and starts
// with an empty block list and no defragmentator.
6230 #if VMA_STATS_STRING_ENABLED 6232 #endif // #if VMA_STATS_STRING_ENABLED 6234 VmaBlockVector::VmaBlockVector(
6235 VmaAllocator hAllocator,
6236 uint32_t memoryTypeIndex,
6237 VkDeviceSize preferredBlockSize,
6238 size_t minBlockCount,
6239 size_t maxBlockCount,
6240 VkDeviceSize bufferImageGranularity,
6241 uint32_t frameInUseCount,
6242 bool isCustomPool) :
6243 m_hAllocator(hAllocator),
6244 m_MemoryTypeIndex(memoryTypeIndex),
6245 m_PreferredBlockSize(preferredBlockSize),
6246 m_MinBlockCount(minBlockCount),
6247 m_MaxBlockCount(maxBlockCount),
6248 m_BufferImageGranularity(bufferImageGranularity),
6249 m_FrameInUseCount(frameInUseCount),
6250 m_IsCustomPool(isCustomPool),
// Block pointers live in a vector backed by the allocator's own callbacks.
6251 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6252 m_HasEmptyBlock(false),
6253 m_pDefragmentator(VMA_NULL)
6257 VmaBlockVector::~VmaBlockVector()
6259 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6261 for(
size_t i = m_Blocks.size(); i--; )
6263 m_Blocks[i]->Destroy(m_hAllocator);
6264 vma_delete(m_hAllocator, m_Blocks[i]);
6268 VkResult VmaBlockVector::CreateMinBlocks()
6270 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6272 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6273 if(res != VK_SUCCESS)
// Gathers pool statistics across all blocks under the vector's mutex.
// NOTE(review): lossy extraction - the initialization of *pStats and the
// per-block blockCount increment are missing from this text; see upstream.
6281 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6289 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6291 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6293 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6295 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block folds its own totals into the shared stats struct.
6296 pBlock->m_Metadata.AddPoolStats(*pStats);
// Maximum number of retries in VmaBlockVector::Allocate's "make other
// allocations lost" loop before giving up with VK_ERROR_TOO_MANY_OBJECTS.
6300 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector, under the vector's mutex, in three phases:
//  1. Try each existing block (fullest first, thanks to
//     IncrementallySortBlocks) without evicting anything.
//  2. If allowed, create a new block - for non-custom pools the size heuristic
//     halves the preferred size up to NEW_BLOCK_SIZE_SHIFT_MAX times, and also
//     retries creation with halved sizes when the driver is out of memory.
//  3. If the caller allows making other allocations lost, repeatedly pick the
//     cheapest eviction plan across all blocks and try to commit it, up to
//     VMA_ALLOCATION_TRY_COUNT times.
// Returns VK_SUCCESS with *pAllocation set, VK_ERROR_TOO_MANY_OBJECTS if the
// retry budget is exhausted, or VK_ERROR_OUT_OF_DEVICE_MEMORY.
// NOTE(review): lossy extraction - createInfo parameter line, several call
// argument lists (CreateAllocationRequest, InitBlockAllocation), mapping flags,
// returns, and braces are missing from this text; see upstream.
6302 VkResult VmaBlockVector::Allocate(
6303 VmaPool hCurrentPool,
6304 uint32_t currentFrameIndex,
6305 const VkMemoryRequirements& vkMemReq,
6307 VmaSuballocationType suballocType,
6308 VmaAllocation* pAllocation)
6313 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Phase 1: try existing blocks without making anything lost.
6317 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6319 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6320 VMA_ASSERT(pCurrBlock);
6321 VmaAllocationRequest currRequest = {};
6322 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6325 m_BufferImageGranularity,
6333 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the block mapped for their lifetime.
6337 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6338 if(res != VK_SUCCESS)
6345 if(pCurrBlock->m_Metadata.IsEmpty())
6347 m_HasEmptyBlock =
false;
6350 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6351 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6352 (*pAllocation)->InitBlockAllocation(
6361 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6362 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6363 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Phase 2: create a new block if the block count limit permits it.
6368 const bool canCreateNewBlock =
6370 (m_Blocks.size() < m_MaxBlockCount);
6373 if(canCreateNewBlock)
6376 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6377 uint32_t newBlockSizeShift = 0;
6378 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start small: halve while still larger than any existing
// block and at least twice the requested size.
6382 if(m_IsCustomPool ==
false)
6385 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6386 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6388 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6389 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6391 newBlockSize = smallerNewBlockSize;
6392 ++newBlockSizeShift;
6401 size_t newBlockIndex = 0;
6402 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure, retry with progressively halved block sizes.
6404 if(m_IsCustomPool ==
false)
6406 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6408 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6409 if(smallerNewBlockSize >= vkMemReq.size)
6411 newBlockSize = smallerNewBlockSize;
6412 ++newBlockSizeShift;
6413 res = CreateBlock(newBlockSize, &newBlockIndex);
6422 if(res == VK_SUCCESS)
6424 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6425 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6429 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6430 if(res != VK_SUCCESS)
// A brand-new block is empty: the first request covers its beginning.
6437 VmaAllocationRequest allocRequest;
6438 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6439 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6440 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6441 (*pAllocation)->InitBlockAllocation(
6444 allocRequest.offset,
6450 VMA_HEAVY_ASSERT(pBlock->Validate());
6451 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6452 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Phase 3: evict ("make lost") other allocations, cheapest plan first.
6460 if(canMakeOtherLost)
6462 uint32_t tryIndex = 0;
6463 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6465 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6466 VmaAllocationRequest bestRequest = {};
6467 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Scan all blocks for the lowest-cost eviction plan.
6471 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6473 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6474 VMA_ASSERT(pCurrBlock);
6475 VmaAllocationRequest currRequest = {};
6476 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6479 m_BufferImageGranularity,
6486 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6487 if(pBestRequestBlock == VMA_NULL ||
6488 currRequestCost < bestRequestCost)
6490 pBestRequestBlock = pCurrBlock;
6491 bestRequest = currRequest;
6492 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be evicted - cannot do better.
6494 if(bestRequestCost == 0)
6502 if(pBestRequestBlock != VMA_NULL)
6506 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6507 if(res != VK_SUCCESS)
// Committing may fail if the victims were used again meanwhile; then retry.
6513 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6519 if(pBestRequestBlock->m_Metadata.IsEmpty())
6521 m_HasEmptyBlock =
false;
6524 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6525 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6526 (*pAllocation)->InitBlockAllocation(
6535 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6536 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6537 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Retry budget exhausted - give up rather than loop forever.
6551 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6553 return VK_ERROR_TOO_MANY_OBJECTS;
6557 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back to its block. Under the mutex: unmaps persistent
// mappings, releases the suballocation, then applies the empty-block policy -
// at most one empty block is kept alive (m_HasEmptyBlock), and surplus empty
// blocks above m_MinBlockCount are destroyed OUTSIDE the mutex.
// NOTE(review): lossy extraction - the scope braces around the locked section,
// the m_Blocks.remove of the deleted block in the first branch, and some
// else/brace structure are missing from this text; see upstream.
6560 void VmaBlockVector::Free(
6561 VmaAllocation hAllocation)
// Deferred so the actual Vulkan free happens outside the lock.
6563 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6567 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6569 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently-mapped allocations hold one map reference on the block.
6571 if(hAllocation->IsPersistentMap())
6573 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6576 pBlock->m_Metadata.Free(hAllocation);
6577 VMA_HEAVY_ASSERT(pBlock->Validate());
6579 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Empty-block policy: keep at most one empty block around as a cache.
6582 if(pBlock->m_Metadata.IsEmpty())
6585 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6587 pBlockToDelete = pBlock;
6593 m_HasEmptyBlock =
true;
// This block is not empty, but an older empty one at the back may now be surplus.
6598 else if(m_HasEmptyBlock)
6600 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6601 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6603 pBlockToDelete = pLastBlock;
6604 m_Blocks.pop_back();
6605 m_HasEmptyBlock =
false;
6609 IncrementallySortBlocks();
// Destruction of VkDeviceMemory happens outside the mutex.
6614 if(pBlockToDelete != VMA_NULL)
6616 VMA_DEBUG_LOG(
" Deleted empty allocation");
6617 pBlockToDelete->Destroy(m_hAllocator);
6618 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning back-to-front and
// stopping early once the preferred block size is reached (used by the
// new-block size heuristic in Allocate()).
// NOTE(review): lossy extraction - the accumulator declaration/initialization,
// the `break`, and the final `return result;` are missing from this text; note
// also the size_t return with uint64_t casts in this version.
6622 size_t VmaBlockVector::CalcMaxBlockSize()
const 6625 for(
size_t i = m_Blocks.size(); i--; )
6627 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
// No block can usefully exceed the preferred size - stop scanning.
6628 if(result >= m_PreferredBlockSize)
6636 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6638 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6640 if(m_Blocks[blockIndex] == pBlock)
6642 VmaVectorRemove(m_Blocks, blockIndex);
6649 void VmaBlockVector::IncrementallySortBlocks()
6652 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6654 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6656 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index through pNewBlockIndex.
// NOTE(review): lossy extraction - the error-return after AllocateVulkanMemory,
// the remaining pBlock->Init arguments (memory type, handle), and the final
// `return VK_SUCCESS;` are missing from this text; see upstream.
6662 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6664 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6665 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6666 allocInfo.allocationSize = blockSize;
6667 VkDeviceMemory mem = VK_NULL_HANDLE;
6668 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw VkDeviceMemory in block metadata.
6677 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6681 allocInfo.allocationSize);
6683 m_Blocks.push_back(pBlock);
6684 if(pNewBlockIndex != VMA_NULL)
6686 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON, under the mutex. Custom pools print
// memory type, block size and min/max/current block counts; default pools
// print only the preferred block size. Then each block's detailed map follows.
// Compiled only when VMA_STATS_STRING_ENABLED.
// NOTE(review): lossy extraction - the m_IsCustomPool branch structure and the
// per-block index keys in the "Blocks" object are missing from this text.
6692 #if VMA_STATS_STRING_ENABLED 6694 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6696 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: full pool configuration.
6702 json.WriteString(
"MemoryTypeIndex");
6703 json.WriteNumber(m_MemoryTypeIndex);
6705 json.WriteString(
"BlockSize");
6706 json.WriteNumber(m_PreferredBlockSize);
6708 json.WriteString(
"BlockCount");
6709 json.BeginObject(
true);
6710 if(m_MinBlockCount > 0)
6712 json.WriteString(
"Min");
6713 json.WriteNumber((uint64_t)m_MinBlockCount);
6715 if(m_MaxBlockCount < SIZE_MAX)
6717 json.WriteString(
"Max");
6718 json.WriteNumber((uint64_t)m_MaxBlockCount);
6720 json.WriteString(
"Cur");
6721 json.WriteNumber((uint64_t)m_Blocks.size());
6724 if(m_FrameInUseCount > 0)
6726 json.WriteString(
"FrameInUseCount");
6727 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size.
6732 json.WriteString(
"PreferredBlockSize");
6733 json.WriteNumber(m_PreferredBlockSize);
6736 json.WriteString(
"Blocks");
6738 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6740 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector and returns it.
// NOTE(review): lossy extraction - the constructor arguments passed to
// VmaDefragmentator (allocator, this vector, frame index) are missing from
// this text; see upstream.
6747 #endif // #if VMA_STATS_STRING_ENABLED 6749 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6750 VmaAllocator hAllocator,
6751 uint32_t currentFrameIndex)
6753 if(m_pDefragmentator == VMA_NULL)
6755 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6761 return m_pDefragmentator;
// Runs defragmentation (within the budget of maxBytesToMove /
// maxAllocationsToMove), accumulates the results into pDefragmentationStats,
// and destroys blocks that became empty - keeping m_MinBlockCount blocks and
// remembering at most one empty block via m_HasEmptyBlock.
// NOTE(review): lossy extraction - the pDefragmentationStats parameter line,
// stats accumulation into bytesMoved/allocationsMoved, budget decrements, and
// the final `return result;` are missing from this text; see upstream.
6764 VkResult VmaBlockVector::Defragment(
6766 VkDeviceSize& maxBytesToMove,
6767 uint32_t& maxAllocationsToMove)
6769 if(m_pDefragmentator == VMA_NULL)
6774 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6777 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Fold what the defragmentator actually moved into the caller's stats/budget.
6780 if(pDefragmentationStats != VMA_NULL)
6782 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6783 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6786 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6787 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks freed up by defragmentation, back-to-front.
6793 m_HasEmptyBlock =
false;
6794 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6796 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6797 if(pBlock->m_Metadata.IsEmpty())
6799 if(m_Blocks.size() > m_MinBlockCount)
6801 if(pDefragmentationStats != VMA_NULL)
6804 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6807 VmaVectorRemove(m_Blocks, blockIndex);
6808 pBlock->Destroy(m_hAllocator);
6809 vma_delete(m_hAllocator, pBlock);
// Cannot delete (at minimum count): just remember we still have an empty block.
6813 m_HasEmptyBlock =
true;
6821 void VmaBlockVector::DestroyDefragmentator()
6823 if(m_pDefragmentator != VMA_NULL)
6825 vma_delete(m_hAllocator, m_pDefragmentator);
6826 m_pDefragmentator = VMA_NULL;
6830 void VmaBlockVector::MakePoolAllocationsLost(
// Marks eligible allocations in every block of this vector as lost for the
// given frame (honoring m_FrameInUseCount) and optionally reports the total
// number of allocations lost via pLostAllocationCount.
6831 uint32_t currentFrameIndex,
6832 size_t* pLostAllocationCount)
6834 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6835 size_t lostAllocationCount = 0;
6836 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6838 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6840 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Output parameter is optional.
6842 if(pLostAllocationCount != VMA_NULL)
6844 *pLostAllocationCount = lostAllocationCount;
6848 void VmaBlockVector::AddStats(
// Folds per-block allocation statistics into pStats: the global total, the
// entry for this vector's memory type, and the entry for its memory heap.
// NOTE(review): the declaration line for allocationStatInfo (a VmaStatInfo
// local) was elided from this extract.
VmaStats* pStats)
6850 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6851 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6853 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6855 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6857 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6859 VMA_HEAVY_ASSERT(pBlock->Validate());
6861 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6862 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6863 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6864 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6871 VmaDefragmentator::VmaDefragmentator(
// Constructor: binds the defragmentator to one allocator and one block
// vector, and creates its allocation/block bookkeeping containers using the
// allocator's callbacks.
// NOTE(review): the m_BytesMoved(0) initializer line (original 6878) appears
// to have been elided from this extract.
6872 VmaAllocator hAllocator,
6873 VmaBlockVector* pBlockVector,
6874 uint32_t currentFrameIndex) :
6875 m_hAllocator(hAllocator),
6876 m_pBlockVector(pBlockVector),
6877 m_CurrentFrameIndex(currentFrameIndex),
6879 m_AllocationsMoved(0),
6880 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6881 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6885 VmaDefragmentator::~VmaDefragmentator()
// Destructor: releases the heap-allocated BlockInfo helpers, iterating in
// reverse (the `i--` idiom avoids underflow of the unsigned index).
6887 for(
size_t i = m_Blocks.size(); i--; )
6889 vma_delete(m_hAllocator, m_Blocks[i]);
6893 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
// Registers one allocation as a candidate for moving. pChanged (optional) is
// set to VK_TRUE later if the allocation actually gets relocated.
6895 AllocationInfo allocInfo;
6896 allocInfo.m_hAllocation = hAlloc;
6897 allocInfo.m_pChanged = pChanged;
6898 m_Allocations.push_back(allocInfo);
6901 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
// Returns a CPU pointer to the block's memory, in priority order: a mapping
// previously created for defragmentation, then the block's persistent
// mapping, and finally a fresh Map() whose pointer is cached for Unmap().
// NOTE(review): the early `return VK_SUCCESS;` lines after the first two
// cases were elided from this extract.
void** ppMappedData)
6904 if(m_pMappedDataForDefragmentation)
6906 *ppMappedData = m_pMappedDataForDefragmentation;
6911 if(m_pBlock->m_Mapping.GetMappedData())
6913 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
// Map for the duration of defragmentation; Unmap() releases this reference.
6918 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6919 *ppMappedData = m_pMappedDataForDefragmentation;
6923 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
// Releases the mapping created by EnsureMapping(), if one was made; a mapping
// borrowed from the block's persistent mapping is left untouched.
6925 if(m_pMappedDataForDefragmentation != VMA_NULL)
6927 m_pBlock->Unmap(hAllocator, 1);
6931 VkResult VmaDefragmentator::DefragmentRound(
// One pass of defragmentation: repeatedly takes the current source allocation
// (scanning blocks from the back) and tries to move it into an earlier block
// or an earlier offset, memcpy-ing through mapped pointers and updating block
// metadata. Returns VK_INCOMPLETE when a budget (bytes or allocation count)
// would be exceeded. NOTE(review): several control-flow lines (outer loop
// header, early returns, some closing braces) were elided from this extract.
6932 VkDeviceSize maxBytesToMove,
6933 uint32_t maxAllocationsToMove)
6935 if(m_Blocks.empty())
// Source cursor starts past the end of the last block's allocation list.
6940 size_t srcBlockIndex = m_Blocks.size() - 1;
6941 size_t srcAllocIndex = SIZE_MAX;
// Walk backwards to the next block that still has allocations to consider.
6947 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6949 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6952 if(srcBlockIndex == 0)
6959 srcAllocIndex = SIZE_MAX;
6964 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6968 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6969 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6971 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6972 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6973 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6974 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to and including the source block.
6977 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6979 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6980 VmaAllocationRequest dstAllocRequest;
6981 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6982 m_CurrentFrameIndex,
6983 m_pBlockVector->GetFrameInUseCount(),
6984 m_pBlockVector->GetBufferImageGranularity(),
6989 &dstAllocRequest) &&
6991 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6993 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop the round before exceeding either caller-supplied budget.
6996 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6997 (m_BytesMoved + size > maxBytesToMove))
6999 return VK_INCOMPLETE;
7002 void* pDstMappedData = VMA_NULL;
7003 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7004 if(res != VK_SUCCESS)
7009 void* pSrcMappedData = VMA_NULL;
7010 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7011 if(res != VK_SUCCESS)
// Physical move of the allocation's bytes (memcpy call line elided above).
7018 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7019 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7020 static_cast<size_t>(size));
// Update metadata: allocate at the destination, free the source slot, and
// repoint the allocation at its new block/offset.
7022 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7023 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7025 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7027 if(allocInfo.m_pChanged != VMA_NULL)
7029 *allocInfo.m_pChanged = VK_TRUE;
7032 ++m_AllocationsMoved;
7033 m_BytesMoved += size;
7035 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor (decrement / move to previous block; the
// detailed advance logic between these branches was elided).
7043 if(srcAllocIndex > 0)
7049 if(srcBlockIndex > 0)
7052 srcAllocIndex = SIZE_MAX;
7062 VkResult VmaDefragmentator::Defragment(
// Top-level driver: builds per-block BlockInfo entries, distributes the
// registered (non-lost) allocations into their owning blocks, sorts blocks
// into preferred destination order, runs up to two DefragmentRound passes,
// then unmaps everything mapped for defragmentation.
7063 VkDeviceSize maxBytesToMove,
7064 uint32_t maxAllocationsToMove)
7066 if(m_Allocations.empty())
7072 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7073 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7075 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7076 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7077 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be matched via binary search.
7081 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
7084 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7086 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are skipped entirely.
7088 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7090 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7091 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7092 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7094 (*it)->m_Allocations.push_back(allocInfo);
7102 m_Allocations.clear();
7104 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7106 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7107 pBlockInfo->CalcHasNonMovableAllocations();
7108 pBlockInfo->SortAllocationsBySizeDescecnding();
// Reorder blocks so preferred move destinations come first.
7112 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
7115 VkResult result = VK_SUCCESS;
7116 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7118 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7122 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7124 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7130 bool VmaDefragmentator::MoveMakesSense(
// Heuristic: a move is worthwhile only if it goes to an earlier block, or to
// a lower offset within the same block.
// NOTE(review): the `return true;`/`return false;` statements for each branch
// were elided from this extract.
7131 size_t dstBlockIndex, VkDeviceSize dstOffset,
7132 size_t srcBlockIndex, VkDeviceSize srcOffset)
7134 if(dstBlockIndex < srcBlockIndex)
7138 if(dstBlockIndex > srcBlockIndex)
7142 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (signature line elided from this extract):
// initializes members from VmaAllocatorCreateInfo, imports Vulkan function
// pointers, queries physical-device properties, applies optional per-heap
// size limits, and creates one default block vector plus one dedicated-
// allocation list per memory type.
7155 m_hDevice(pCreateInfo->device),
7156 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7157 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7158 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7159 m_PreferredLargeHeapBlockSize(0),
7160 m_PhysicalDevice(pCreateInfo->physicalDevice),
7161 m_CurrentFrameIndex(0),
7162 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all aggregate members before any of them is populated below.
7166 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7167 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7168 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7170 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7171 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
7173 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7175 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7186 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7187 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply optional per-heap size limits, clamping the reported heap sizes so
// the rest of the allocator observes the limited capacity.
7194 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7196 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7197 if(limit != VK_WHOLE_SIZE)
7199 m_HeapSizeLimit[heapIndex] = limit;
7200 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7202 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector and one dedicated-allocation vector per type.
// NOTE(review): most vma_new constructor-argument lines were elided here.
7208 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7210 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7212 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7218 GetBufferImageGranularity(),
7223 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7227 VmaAllocator_T::~VmaAllocator_T()
// Destructor: all user-created pools must already be destroyed; then the
// per-memory-type dedicated-allocation lists and block vectors are released
// in reverse order.
7229 VMA_ASSERT(m_Pools.empty());
7231 for(
size_t i = GetMemoryTypeCount(); i--; )
7233 vma_delete(
this, m_pDedicatedAllocations[i]);
7234 vma_delete(
this, m_pBlockVectors[i]);
7238 void VmaAllocator_T::ImportVulkanFunctions(
// Fills m_VulkanFunctions: with static linking, takes the addresses of the
// globally linked Vulkan entry points (plus vkGetDeviceProcAddr lookups for
// the KHR dedicated-allocation functions); then overrides any pointer the
// user supplied via pVulkanFunctions; finally asserts every required pointer
// is set.
const VmaVulkanFunctions* pVulkanFunctions)
7240 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7241 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7242 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7243 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7244 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7245 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7246 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7247 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7248 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7249 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7250 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7251 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7252 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7253 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7254 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points must be fetched at runtime even when statically
// linked, because they are not part of the core loader exports.
7255 if(m_UseKhrDedicatedAllocation)
7257 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7258 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7259 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7260 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers (if any) take precedence over the defaults above.
7262 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7264 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7265 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7267 if(pVulkanFunctions != VMA_NULL)
7269 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7270 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7271 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7272 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7273 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7274 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7275 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7276 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7277 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7278 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7279 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7280 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7281 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7282 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7283 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7284 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function pointer must now be non-null.
7287 #undef VMA_COPY_IF_NOT_NULL 7291 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7292 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7293 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7294 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7295 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7296 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7297 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7298 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7299 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7300 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7301 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7302 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7303 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7304 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7305 if(m_UseKhrDedicatedAllocation)
7307 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7308 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7312 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7314 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7315 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7316 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7317 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
7320 VkResult VmaAllocator_T::AllocateMemoryOfType(
// Allocates memory from a specific memory type: prefers a dedicated
// VkDeviceMemory allocation when required/preferred (and permitted), else
// sub-allocates from the type's default block vector, falling back to a
// dedicated allocation if the block vector fails.
// NOTE(review): several lines (finalCreateInfo setup, flag tests, some
// argument lists and returns) were elided from this extract.
7321 const VkMemoryRequirements& vkMemReq,
7322 bool dedicatedAllocation,
7323 VkBuffer dedicatedBuffer,
7324 VkImage dedicatedImage,
7326 uint32_t memTypeIndex,
7327 VmaSuballocationType suballocType,
7328 VmaAllocation* pAllocation)
7330 VMA_ASSERT(pAllocation != VMA_NULL);
7331 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped-bit handling depends on the type being HOST_VISIBLE (condition's
// first half elided above).
7337 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7342 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7343 VMA_ASSERT(blockVector);
7345 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests larger than half a block go to dedicated memory.
7346 bool preferDedicatedMemory =
7347 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7348 dedicatedAllocation ||
7350 vkMemReq.size > preferredBlockSize / 2;
7352 if(preferDedicatedMemory &&
7354 finalCreateInfo.
pool == VK_NULL_HANDLE)
7363 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7367 return AllocateDedicatedMemory(
// Primary path: sub-allocate from the default block vector.
7381 VkResult res = blockVector->Allocate(
7383 m_CurrentFrameIndex.load(),
7388 if(res == VK_SUCCESS)
7396 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block-vector allocation failed; try dedicated memory.
7400 res = AllocateDedicatedMemory(
7406 finalCreateInfo.pUserData,
7410 if(res == VK_SUCCESS)
7413 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7419 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7426 VkResult VmaAllocator_T::AllocateDedicatedMemory(
// Allocates one dedicated VkDeviceMemory object (optionally chaining
// VkMemoryDedicatedAllocateInfoKHR for a specific buffer/image), optionally
// maps it persistently, wraps it in a VmaAllocation_T, and registers it in
// the sorted per-memory-type dedicated-allocation list.
// NOTE(review): some parameter lines (size, map flag, pUserData) and error
// returns were elided from this extract.
7428 VmaSuballocationType suballocType,
7429 uint32_t memTypeIndex,
7431 bool isUserDataString,
7433 VkBuffer dedicatedBuffer,
7434 VkImage dedicatedImage,
7435 VmaAllocation* pAllocation)
7437 VMA_ASSERT(pAllocation);
7439 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7440 allocInfo.memoryTypeIndex = memTypeIndex;
7441 allocInfo.allocationSize = size;
// Chain the KHR dedicated-allocation struct when the extension is in use.
7443 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7444 if(m_UseKhrDedicatedAllocation)
7446 if(dedicatedBuffer != VK_NULL_HANDLE)
7448 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7449 dedicatedAllocInfo.buffer = dedicatedBuffer;
7450 allocInfo.pNext = &dedicatedAllocInfo;
7452 else if(dedicatedImage != VK_NULL_HANDLE)
7454 dedicatedAllocInfo.image = dedicatedImage;
7455 allocInfo.pNext = &dedicatedAllocInfo;
7460 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7461 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7464 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping; on failure the fresh memory is released.
7468 void* pMappedData = VMA_NULL;
7471 res = (*m_VulkanFunctions.vkMapMemory)(
7480 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7481 FreeVulkanMemory(memTypeIndex, size, hMemory);
7486 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7487 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7488 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted dedicated-allocation list for this memory type.
7492 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7493 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7494 VMA_ASSERT(pDedicatedAllocations);
7495 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7498 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7503 void VmaAllocator_T::GetBufferMemoryRequirements(
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core query and reports false for both.
// NOTE(review): the VkBuffer hBuffer parameter line was elided from this
// extract (hBuffer is used below).
7505 VkMemoryRequirements& memReq,
7506 bool& requiresDedicatedAllocation,
7507 bool& prefersDedicatedAllocation)
const 7509 if(m_UseKhrDedicatedAllocation)
7511 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7512 memReqInfo.buffer = hBuffer;
7514 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7516 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7517 memReq2.pNext = &memDedicatedReq;
7519 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7521 memReq = memReq2.memoryRequirements;
7522 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7523 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core API, no dedicated-allocation information available.
7527 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7528 requiresDedicatedAllocation =
false;
7529 prefersDedicatedAllocation =
false;
7533 void VmaAllocator_T::GetImageMemoryRequirements(
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR when dedicated allocation is enabled,
// otherwise the core query with both dedicated flags reported as false.
// NOTE(review): the VkImage hImage parameter line was elided from this
// extract (hImage is used below).
7535 VkMemoryRequirements& memReq,
7536 bool& requiresDedicatedAllocation,
7537 bool& prefersDedicatedAllocation)
const 7539 if(m_UseKhrDedicatedAllocation)
7541 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7542 memReqInfo.image = hImage;
7544 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7546 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7547 memReq2.pNext = &memDedicatedReq;
7549 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7551 memReq = memReq2.memoryRequirements;
7552 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7553 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core API, no dedicated-allocation information available.
7557 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7558 requiresDedicatedAllocation =
false;
7559 prefersDedicatedAllocation =
false;
7563 VkResult VmaAllocator_T::AllocateMemory(
// Top-level allocation entry: validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// candidate memory types (best first), retrying with the next type when one
// fails.
// NOTE(review): numerous lines (createInfo parameter, flag tests, the
// vmaFindMemoryTypeIndex calls, and argument lists) were elided from this
// extract.
7564 const VkMemoryRequirements& vkMemReq,
7565 bool requiresDedicatedAllocation,
7566 bool prefersDedicatedAllocation,
7567 VkBuffer dedicatedBuffer,
7568 VkImage dedicatedImage,
7570 VmaSuballocationType suballocType,
7571 VmaAllocation* pAllocation)
// Invalid flag combinations are rejected up front.
7576 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7577 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7582 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7583 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7585 if(requiresDedicatedAllocation)
7589 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7590 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7592 if(createInfo.
pool != VK_NULL_HANDLE)
7594 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7595 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7598 if((createInfo.
pool != VK_NULL_HANDLE) &&
7601 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7602 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations bypass the default block vectors entirely.
7605 if(createInfo.
pool != VK_NULL_HANDLE)
7607 return createInfo.
pool->m_BlockVector.Allocate(
7609 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, then retry with the remaining
// candidate types (clearing each failed type's bit) until success.
7618 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7619 uint32_t memTypeIndex = UINT32_MAX;
7621 if(res == VK_SUCCESS)
7623 res = AllocateMemoryOfType(
7625 requiresDedicatedAllocation || prefersDedicatedAllocation,
7633 if(res == VK_SUCCESS)
7643 memoryTypeBits &= ~(1u << memTypeIndex);
7646 if(res == VK_SUCCESS)
7648 res = AllocateMemoryOfType(
7650 requiresDedicatedAllocation || prefersDedicatedAllocation,
7658 if(res == VK_SUCCESS)
7668 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7679 void VmaAllocator_T::FreeMemory(
// Frees one allocation: block allocations are returned to their owning block
// vector (pool-specific or default per-memory-type); dedicated allocations
// release their VkDeviceMemory. Lost allocations skip the free path; in all
// cases the user data is cleared and the handle object is deleted.
const VmaAllocation allocation)
7681 VMA_ASSERT(allocation);
// A lost allocation's backing memory is already gone -- nothing to free.
7683 if(allocation->CanBecomeLost() ==
false ||
7684 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7686 switch(allocation->GetType())
7688 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7690 VmaBlockVector* pBlockVector = VMA_NULL;
7691 VmaPool hPool = allocation->GetPool();
7692 if(hPool != VK_NULL_HANDLE)
7694 pBlockVector = &hPool->m_BlockVector;
7698 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7699 pBlockVector = m_pBlockVectors[memTypeIndex];
7701 pBlockVector->Free(allocation);
7704 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7705 FreeDedicatedMemory(allocation);
7712 allocation->SetUserData(
this, VMA_NULL);
7713 vma_delete(
this, allocation);
7716 void VmaAllocator_T::CalculateStats(
// Builds complete allocator statistics: initializes all entries, adds stats
// from every default block vector, every user pool, and every dedicated
// allocation, then post-processes totals and per-type/per-heap entries.
// NOTE(review): the InitStatInfo calls inside the first two loops and the
// allocationStatInfo declaration were elided from this extract.
VmaStats* pStats)
7719 InitStatInfo(pStats->
total);
7720 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7722 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors.
7726 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7728 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7729 VMA_ASSERT(pBlockVector);
7730 pBlockVector->AddStats(pStats);
// User-created pools (under the pools mutex).
7735 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7736 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7738 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations (per-memory-type, under each list's mutex).
7743 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7745 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7746 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7747 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7748 VMA_ASSERT(pDedicatedAllocVector);
7749 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7752 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7753 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7754 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7755 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/summary fields for every populated entry.
7760 VmaPostprocessCalcStatInfo(pStats->
total);
7761 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7762 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7763 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7764 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// AMD's PCI vendor ID (4098 == 0x1002), used to detect AMD GPUs.
7767 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
7769 VkResult VmaAllocator_T::Defragment(
// Allocator-wide defragmentation: zeroes the output arrays/stats, assigns
// each movable HOST_VISIBLE, non-lost block allocation to its block vector's
// defragmentator, runs Defragment() on every eligible default block vector
// and pool, then destroys all defragmentators.
// NOTE(review): the pDefragmentationInfo/pDefragmentationStats parameter
// lines and some precondition asserts were elided from this extract. Also
// note the memsets below use sizeof(*ptr) -- a single element -- which only
// clears the first entry of each caller array; presumably the elided lines
// multiply by count. Confirm against the full file.
7770 VmaAllocation* pAllocations,
7771 size_t allocationCount,
7772 VkBool32* pAllocationsChanged,
7776 if(pAllocationsChanged != VMA_NULL)
7778 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7780 if(pDefragmentationStats != VMA_NULL)
7782 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7785 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7787 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7789 const size_t poolCount = m_Pools.size();
// Dispatch each candidate allocation to the defragmentator of its vector.
7792 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7794 VmaAllocation hAlloc = pAllocations[allocIndex];
7796 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block allocations in HOST_VISIBLE memory that are not lost can move.
7798 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7800 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7802 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7804 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7806 const VmaPool hAllocPool = hAlloc->GetPool();
7808 if(hAllocPool != VK_NULL_HANDLE)
7810 pAllocBlockVector = &hAllocPool->GetBlockVector();
7815 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7818 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7820 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7821 &pAllocationsChanged[allocIndex] : VMA_NULL;
7822 pDefragmentator->AddAllocation(hAlloc, pChanged);
7826 VkResult result = VK_SUCCESS;
// Budgets default to unlimited unless pDefragmentationInfo overrides them.
7830 VkDeviceSize maxBytesToMove = SIZE_MAX;
7831 uint32_t maxAllocationsToMove = UINT32_MAX;
7832 if(pDefragmentationInfo != VMA_NULL)
7839 for(uint32_t memTypeIndex = 0;
7840 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7844 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7846 result = m_pBlockVectors[memTypeIndex]->Defragment(
7847 pDefragmentationStats,
7849 maxAllocationsToMove);
7854 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7856 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7857 pDefragmentationStats,
7859 maxAllocationsToMove);
// Teardown: destroy every defragmentator created above (pools first).
7865 for(
size_t poolIndex = poolCount; poolIndex--; )
7867 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7871 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7873 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7875 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
7882 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
// Fills pAllocationInfo for an allocation. For allocations that can become
// lost, this also "touches" them: a compare-exchange loop advances the
// last-use frame index to the current frame, or reports lost-allocation
// values (VK_NULL_HANDLE memory, offset 0) when already lost.
VmaAllocationInfo* pAllocationInfo)
7884 if(hAllocation->CanBecomeLost())
7890 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7891 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report null memory with the original size preserved.
7894 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7898 pAllocationInfo->
offset = 0;
7899 pAllocationInfo->
size = hAllocation->GetSize();
7901 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the live values directly.
7904 else if(localLastUseFrameIndex == localCurrFrameIndex)
7906 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7907 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7908 pAllocationInfo->
offset = hAllocation->GetOffset();
7909 pAllocationInfo->
size = hAllocation->GetSize();
7911 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the frame index; on CAS failure the loop
// (elided here) retries with the refreshed value.
7916 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7918 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: report all fields directly.
7925 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7926 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7927 pAllocationInfo->
offset = hAllocation->GetOffset();
7928 pAllocationInfo->
size = hAllocation->GetSize();
7929 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7930 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7934 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
// Lighter-weight variant of GetAllocationInfo: advances the allocation's
// last-use frame index via the same compare-exchange loop and reports
// whether the allocation is still alive.
// NOTE(review): the return statements (false when lost, true otherwise) were
// elided from this extract.
7937 if(hAllocation->CanBecomeLost())
7939 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7940 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7943 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7947 else if(localLastUseFrameIndex == localCurrFrameIndex)
7953 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7955 localLastUseFrameIndex = localCurrFrameIndex;
7966 VkResult VmaAllocator_T::CreatePool(
// Creates a custom memory pool: allocates the VmaPool_T object, pre-creates
// its minimum block count (destroying the pool on failure), and registers it
// in the sorted m_Pools list.
// NOTE(review): the newCreateInfo normalization lines (defaulting block size
// and counts) were elided from this extract.
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7968 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7981 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7983 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7984 if(res != VK_SUCCESS)
7986 vma_delete(
this, *pPool);
7993 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7994 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
8000 void VmaAllocator_T::DestroyPool(VmaPool pool)
// Unregisters the pool from the sorted m_Pools list (under the pools mutex)
// and deletes the pool object.
8004 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8005 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8006 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8009 vma_delete(
this, pool);
8012 void VmaAllocator_T::GetPoolStats(VmaPool pool,
// Thin forwarder: pool statistics come from the pool's block vector.
VmaPoolStats* pPoolStats)
8014 pool->m_BlockVector.GetPoolStats(pPoolStats);
8017 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
8019 m_CurrentFrameIndex.store(frameIndex);
8022 void VmaAllocator_T::MakePoolAllocationsLost(
// Forwards to the pool's block vector using the current frame index.
// NOTE(review): the VmaPool hPool parameter line was elided from this
// extract (hPool is used below).
8024 size_t* pLostAllocationCount)
8026 hPool->m_BlockVector.MakePoolAllocationsLost(
8027 m_CurrentFrameIndex.load(),
8028 pLostAllocationCount);
8031 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
// Creates a dummy allocation that is born lost (frame index
// VMA_FRAME_INDEX_LOST, no user-data string) -- useful as a placeholder
// handle that never owns memory.
8033 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8034 (*pAllocation)->InitLost();
8037 VkResult VmaAllocator_T::AllocateVulkanMemory(
// Wraps vkAllocateMemory, enforcing the optional per-heap size limit (a
// budget decremented under m_HeapSizeLimitMutex) and invoking the user's
// pfnAllocate device-memory callback on success.
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8039 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: check and debit the remaining budget atomically.
8042 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8044 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8045 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8047 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8048 if(res == VK_SUCCESS)
8050 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: fail without calling the driver.
8055 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8060 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8063 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8065 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8071 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
// Counterpart of AllocateVulkanMemory: fires the user's pfnFree callback,
// frees the VkDeviceMemory, and refunds the freed size to the per-heap
// budget when a heap size limit is in effect.
8073 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8075 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8078 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8080 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8081 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8083 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8084 m_HeapSizeLimit[heapIndex] += size;
8088 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole block (reference-counted) and
// offset the returned pointer; dedicated allocations map directly.
void** ppData)
8090 if(hAllocation->CanBecomeLost())
8092 return VK_ERROR_MEMORY_MAP_FAILED;
8095 switch(hAllocation->GetType())
8097 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8099 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8100 char *pBytes = VMA_NULL;
8101 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8102 if(res == VK_SUCCESS)
// Caller's pointer is the block mapping plus this allocation's offset; the
// allocation's own map reference count is bumped alongside the block's.
8104 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8105 hAllocation->BlockAllocMap();
8109 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8110 return hAllocation->DedicatedAllocMap(
this, ppData);
// Unknown allocation type: defensive failure.
8113 return VK_ERROR_MEMORY_MAP_FAILED;
8117 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
// Reverses Map(): decrements the allocation's map count and releases one
// reference on the owning block's mapping (block case) or unmaps the
// dedicated memory directly (dedicated case).
8119 switch(hAllocation->GetType())
8121 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8123 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8124 hAllocation->BlockAllocUnmap();
8125 pBlock->Unmap(
this, 1);
8128 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8129 hAllocation->DedicatedAllocUnmap(
this);
8136 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
// Frees one dedicated allocation: removes it from the per-memory-type sorted
// registry (under that type's mutex), unmaps the memory if it was
// persistently mapped, and releases the VkDeviceMemory via FreeVulkanMemory
// (which also refunds any heap budget).
8138 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8140 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8142 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8143 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8144 VMA_ASSERT(pDedicatedAllocations);
8145 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8146 VMA_ASSERT(success);
8149 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistent mapping (if any) must be released before freeing the memory.
8151 if(allocation->GetMappedData() != VMA_NULL)
8153 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8156 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8158 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// ---------------------------------------------------------------------------
// NOTE(review): Doxygen-extracted fragment of vk_mem_alloc.h. The extraction
// dropped many lines (braces, json.EndObject()/EndArray()/EndString() calls,
// and some parameter lines), so the text below is kept byte-identical and
// only annotated. Do not expect it to compile as-is.
// ---------------------------------------------------------------------------
// Fragment of VmaAllocator_T::PrintDetailedMap — writes dedicated allocations,
// default pools, and custom pools into a JSON document.
8161 #if VMA_STATS_STRING_ENABLED 8163 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8165 bool dedicatedAllocationsStarted =
false;
// Pass 1: per-memory-type dedicated allocations.
8166 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8168 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8169 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8170 VMA_ASSERT(pDedicatedAllocVector);
8171 if(pDedicatedAllocVector->empty() ==
false)
8173 if(dedicatedAllocationsStarted ==
false)
8175 dedicatedAllocationsStarted =
true;
8176 json.WriteString(
"DedicatedAllocations");
8180 json.BeginString(
"Type ");
8181 json.ContinueString(memTypeIndex);
8186 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8188 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8189 json.BeginObject(
true);
8191 json.WriteString(
"Type");
8192 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8194 json.WriteString(
"Size");
8195 json.WriteNumber(hAlloc->GetSize());
8197 const void* pUserData = hAlloc->GetUserData();
8198 if(pUserData != VMA_NULL)
8200 json.WriteString(
"UserData");
8201 if(hAlloc->IsUserDataString())
8203 json.WriteString((
const char*)pUserData);
8208 json.ContinueString_Pointer(pUserData);
8219 if(dedicatedAllocationsStarted)
// Pass 2: default (per-memory-type) block vectors.
8225 bool allocationsStarted =
false;
8226 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8228 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8230 if(allocationsStarted ==
false)
8232 allocationsStarted =
true;
8233 json.WriteString(
"DefaultPools");
8237 json.BeginString(
"Type ");
8238 json.ContinueString(memTypeIndex);
8241 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8244 if(allocationsStarted)
// Pass 3: user-created custom pools.
8251 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8252 const size_t poolCount = m_Pools.size();
8255 json.WriteString(
"Pools");
8257 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8259 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Fragment of static AllocateMemoryForImage(...) begins after the #endif on
// the next line; its VkImage parameter line was dropped by the extraction.
8266 #endif // #if VMA_STATS_STRING_ENABLED 8268 static VkResult AllocateMemoryForImage(
8269 VmaAllocator allocator,
8272 VmaSuballocationType suballocType,
8273 VmaAllocation* pAllocation)
8275 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8277 VkMemoryRequirements vkMemReq = {};
8278 bool requiresDedicatedAllocation =
false;
8279 bool prefersDedicatedAllocation =
false;
8280 allocator->GetImageMemoryRequirements(image, vkMemReq,
8281 requiresDedicatedAllocation, prefersDedicatedAllocation);
8283 return allocator->AllocateMemory(
8285 requiresDedicatedAllocation,
8286 prefersDedicatedAllocation,
8289 *pAllocationCreateInfo,
// ---------------------------------------------------------------------------
// NOTE(review): Doxygen-extracted fragments of the vma* public API. The
// extraction dropped the function-name lines and many body lines; text is
// kept byte-identical and only annotated with which function each fragment
// belongs to.
// ---------------------------------------------------------------------------
// Fragment of vmaCreateAllocator(...) — creates the VmaAllocator object.
8299 VmaAllocator* pAllocator)
8301 VMA_ASSERT(pCreateInfo && pAllocator);
8302 VMA_DEBUG_LOG(
"vmaCreateAllocator");
// Fragment of vmaDestroyAllocator(...) — copies the callbacks out before
// deleting the allocator that owns them.
8308 VmaAllocator allocator)
8310 if(allocator != VK_NULL_HANDLE)
8312 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8313 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8314 vma_delete(&allocationCallbacks, allocator);
// Fragment of vmaGetPhysicalDeviceProperties(...)
8319 VmaAllocator allocator,
8320 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8322 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8323 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// Fragment of vmaGetMemoryProperties(...)
8327 VmaAllocator allocator,
8328 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8330 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8331 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Fragment of vmaGetMemoryTypeProperties(...)
8335 VmaAllocator allocator,
8336 uint32_t memoryTypeIndex,
8337 VkMemoryPropertyFlags* pFlags)
8339 VMA_ASSERT(allocator && pFlags);
8340 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8341 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Fragment of vmaSetCurrentFrameIndex(...)
8345 VmaAllocator allocator,
8346 uint32_t frameIndex)
8348 VMA_ASSERT(allocator);
8349 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8351 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8353 allocator->SetCurrentFrameIndex(frameIndex);
// Fragment of vmaCalculateStats(...)
8357 VmaAllocator allocator,
8360 VMA_ASSERT(allocator && pStats);
8361 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8362 allocator->CalculateStats(pStats);
// Fragment of vmaBuildStatsString(...) — serializes allocator statistics to a
// heap-allocated JSON string owned by the caller (free with vmaFreeStatsString).
8365 #if VMA_STATS_STRING_ENABLED 8368 VmaAllocator allocator,
8369 char** ppStatsString,
8370 VkBool32 detailedMap)
8372 VMA_ASSERT(allocator && ppStatsString);
8373 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8375 VmaStringBuilder sb(allocator);
8377 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8381 allocator->CalculateStats(&stats);
8383 json.WriteString(
"Total");
8384 VmaPrintStatInfo(json, stats.
total);
// Per-heap section: size, flags, stats, then per-type subsections.
8386 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8388 json.BeginString(
"Heap ");
8389 json.ContinueString(heapIndex);
8393 json.WriteString(
"Size");
8394 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8396 json.WriteString(
"Flags");
8397 json.BeginArray(
true);
8398 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8400 json.WriteString(
"DEVICE_LOCAL");
8406 json.WriteString(
"Stats");
8407 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8410 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8412 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8414 json.BeginString(
"Type ");
8415 json.ContinueString(typeIndex);
8420 json.WriteString(
"Flags");
8421 json.BeginArray(
true);
8422 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8423 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8425 json.WriteString(
"DEVICE_LOCAL");
8427 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8429 json.WriteString(
"HOST_VISIBLE");
8431 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8433 json.WriteString(
"HOST_COHERENT");
8435 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8437 json.WriteString(
"HOST_CACHED");
8439 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8441 json.WriteString(
"LAZILY_ALLOCATED");
8447 json.WriteString(
"Stats");
8448 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8457 if(detailedMap == VK_TRUE)
8459 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a caller-owned, NUL-terminated string.
8465 const size_t len = sb.GetLength();
8466 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8469 memcpy(pChars, sb.GetData(), len);
8472 *ppStatsString = pChars;
// Fragment of vmaFreeStatsString(...) — releases the string from vmaBuildStatsString.
8476 VmaAllocator allocator,
8479 if(pStatsString != VMA_NULL)
8481 VMA_ASSERT(allocator);
8482 size_t len = strlen(pStatsString);
8483 vma_delete_array(allocator, pStatsString, len + 1);
// ---------------------------------------------------------------------------
// NOTE(review): Doxygen-extracted fragments; signature lines and several body
// lines were dropped by the extraction. Kept byte-identical, annotated only.
// ---------------------------------------------------------------------------
// Fragment of vmaFindMemoryTypeIndex(...) — picks the memory type allowed by
// memoryTypeBits that satisfies all requiredFlags with the fewest missing
// preferredFlags (lowest "cost").
8487 #endif // #if VMA_STATS_STRING_ENABLED 8493 VmaAllocator allocator,
8494 uint32_t memoryTypeBits,
8496 uint32_t* pMemoryTypeIndex)
8498 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8499 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8500 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8507 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
// Translate VmaMemoryUsage into required/preferred property flags.
8511 switch(pAllocationCreateInfo->
usage)
8516 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8519 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8522 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8523 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8526 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8527 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
// Scan all memory types; cost = number of preferred flags the type lacks.
8533 *pMemoryTypeIndex = UINT32_MAX;
8534 uint32_t minCost = UINT32_MAX;
8535 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8536 memTypeIndex < allocator->GetMemoryTypeCount();
8537 ++memTypeIndex, memTypeBit <<= 1)
8540 if((memTypeBit & memoryTypeBits) != 0)
8542 const VkMemoryPropertyFlags currFlags =
8543 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8545 if((requiredFlags & ~currFlags) == 0)
8548 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8550 if(currCost < minCost)
8552 *pMemoryTypeIndex = memTypeIndex;
8562 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// Fragment of vmaFindMemoryTypeIndexForBufferInfo(...) — creates a temporary
// buffer to query its memory requirements, then destroys it.
8566 VmaAllocator allocator,
8567 const VkBufferCreateInfo* pBufferCreateInfo,
8569 uint32_t* pMemoryTypeIndex)
8571 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8572 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8573 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8574 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8576 const VkDevice hDev = allocator->m_hDevice;
8577 VkBuffer hBuffer = VK_NULL_HANDLE;
8578 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8579 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8580 if(res == VK_SUCCESS)
8582 VkMemoryRequirements memReq = {};
8583 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8584 hDev, hBuffer, &memReq);
8588 memReq.memoryTypeBits,
8589 pAllocationCreateInfo,
8592 allocator->GetVulkanFunctions().vkDestroyBuffer(
8593 hDev, hBuffer, allocator->GetAllocationCallbacks());
// Fragment of vmaFindMemoryTypeIndexForImageInfo(...) — same pattern with a
// temporary image.
8599 VmaAllocator allocator,
8600 const VkImageCreateInfo* pImageCreateInfo,
8602 uint32_t* pMemoryTypeIndex)
8604 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8605 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8606 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8607 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8609 const VkDevice hDev = allocator->m_hDevice;
8610 VkImage hImage = VK_NULL_HANDLE;
8611 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8612 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8613 if(res == VK_SUCCESS)
8615 VkMemoryRequirements memReq = {};
8616 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8617 hDev, hImage, &memReq);
8621 memReq.memoryTypeBits,
8622 pAllocationCreateInfo,
8625 allocator->GetVulkanFunctions().vkDestroyImage(
8626 hDev, hImage, allocator->GetAllocationCallbacks());
// ---------------------------------------------------------------------------
// NOTE(review): Doxygen-extracted fragments; function-name and some parameter
// lines dropped by the extraction. Kept byte-identical, annotated only.
// ---------------------------------------------------------------------------
// Fragment of vmaCreatePool(...)
8632 VmaAllocator allocator,
8636 VMA_ASSERT(allocator && pCreateInfo && pPool);
8638 VMA_DEBUG_LOG(
"vmaCreatePool");
8640 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8642 return allocator->CreatePool(pCreateInfo, pPool);
// Fragment of vmaDestroyPool(...) — null pool handle is a no-op.
8646 VmaAllocator allocator,
8649 VMA_ASSERT(allocator);
8651 if(pool == VK_NULL_HANDLE)
8656 VMA_DEBUG_LOG(
"vmaDestroyPool");
8658 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8660 allocator->DestroyPool(pool);
// Fragment of vmaGetPoolStats(...)
8664 VmaAllocator allocator,
8668 VMA_ASSERT(allocator && pool && pPoolStats);
8670 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8672 allocator->GetPoolStats(pool, pPoolStats);
// Fragment of vmaMakePoolAllocationsLost(...)
8676 VmaAllocator allocator,
8678 size_t* pLostAllocationCount)
8680 VMA_ASSERT(allocator && pool);
8682 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8684 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// Fragment of vmaAllocateMemory(...) — allocates from explicit VkMemoryRequirements.
8688 VmaAllocator allocator,
8689 const VkMemoryRequirements* pVkMemoryRequirements,
8691 VmaAllocation* pAllocation,
8694 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8696 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8698 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8700 VkResult result = allocator->AllocateMemory(
8701 *pVkMemoryRequirements,
8707 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8710 if(pAllocationInfo && result == VK_SUCCESS)
8712 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Fragment of vmaAllocateMemoryForBuffer(...) — queries the buffer's
// requirements (incl. dedicated-allocation preference) before allocating.
8719 VmaAllocator allocator,
8722 VmaAllocation* pAllocation,
8725 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8727 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8729 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8731 VkMemoryRequirements vkMemReq = {};
8732 bool requiresDedicatedAllocation =
false;
8733 bool prefersDedicatedAllocation =
false;
8734 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8735 requiresDedicatedAllocation,
8736 prefersDedicatedAllocation);
8738 VkResult result = allocator->AllocateMemory(
8740 requiresDedicatedAllocation,
8741 prefersDedicatedAllocation,
8745 VMA_SUBALLOCATION_TYPE_BUFFER,
8748 if(pAllocationInfo && result == VK_SUCCESS)
8750 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Fragment of vmaAllocateMemoryForImage(...) — delegates to the static helper.
8757 VmaAllocator allocator,
8760 VmaAllocation* pAllocation,
8763 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8765 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8767 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8769 VkResult result = AllocateMemoryForImage(
8773 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8776 if(pAllocationInfo && result == VK_SUCCESS)
8778 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// ---------------------------------------------------------------------------
// NOTE(review): Doxygen-extracted fragments; function-name lines, braces and
// error-handling branch markers were dropped by the extraction. Kept
// byte-identical, annotated only.
// ---------------------------------------------------------------------------
// Fragment of vmaFreeMemory(...)
8785 VmaAllocator allocator,
8786 VmaAllocation allocation)
8788 VMA_ASSERT(allocator && allocation);
8790 VMA_DEBUG_LOG(
"vmaFreeMemory");
8792 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8794 allocator->FreeMemory(allocation);
// Fragment of vmaGetAllocationInfo(...)
8798 VmaAllocator allocator,
8799 VmaAllocation allocation,
8802 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8804 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8806 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// Fragment of vmaTouchAllocation(...)
8810 VmaAllocator allocator,
8811 VmaAllocation allocation)
8813 VMA_ASSERT(allocator && allocation);
8815 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8817 return allocator->TouchAllocation(allocation);
// Fragment of vmaSetAllocationUserData(...)
8821 VmaAllocator allocator,
8822 VmaAllocation allocation,
8825 VMA_ASSERT(allocator && allocation);
8827 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8829 allocation->SetUserData(allocator, pUserData);
// Fragment of vmaCreateLostAllocation(...)
8833 VmaAllocator allocator,
8834 VmaAllocation* pAllocation)
8836 VMA_ASSERT(allocator && pAllocation);
8838 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8840 allocator->CreateLostAllocation(pAllocation);
// Fragment of vmaMapMemory(...)
8844 VmaAllocator allocator,
8845 VmaAllocation allocation,
8848 VMA_ASSERT(allocator && allocation && ppData);
8850 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8852 return allocator->Map(allocation, ppData);
// Fragment of vmaUnmapMemory(...)
8856 VmaAllocator allocator,
8857 VmaAllocation allocation)
8859 VMA_ASSERT(allocator && allocation);
8861 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8863 allocator->Unmap(allocation);
// Fragment of vmaDefragment(...)
8867 VmaAllocator allocator,
8868 VmaAllocation* pAllocations,
8869 size_t allocationCount,
8870 VkBool32* pAllocationsChanged,
8874 VMA_ASSERT(allocator && pAllocations);
8876 VMA_DEBUG_LOG(
"vmaDefragment");
8878 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8880 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
// Fragment of vmaCreateBuffer(...) — create buffer, allocate, bind; on any
// failure the partially created objects are rolled back (the two trailing
// vkDestroyBuffer calls belong to the error paths).
8884 VmaAllocator allocator,
8885 const VkBufferCreateInfo* pBufferCreateInfo,
8888 VmaAllocation* pAllocation,
8891 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8893 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8895 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8897 *pBuffer = VK_NULL_HANDLE;
8898 *pAllocation = VK_NULL_HANDLE;
8901 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8902 allocator->m_hDevice,
8904 allocator->GetAllocationCallbacks(),
8909 VkMemoryRequirements vkMemReq = {};
8910 bool requiresDedicatedAllocation =
false;
8911 bool prefersDedicatedAllocation =
false;
8912 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8913 requiresDedicatedAllocation, prefersDedicatedAllocation);
// Sanity-check returned alignment against device limits for the usages that
// impose a minimum offset alignment.
8917 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8919 VMA_ASSERT(vkMemReq.alignment %
8920 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8922 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8924 VMA_ASSERT(vkMemReq.alignment %
8925 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8927 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8929 VMA_ASSERT(vkMemReq.alignment %
8930 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8934 res = allocator->AllocateMemory(
8936 requiresDedicatedAllocation,
8937 prefersDedicatedAllocation,
8940 *pAllocationCreateInfo,
8941 VMA_SUBALLOCATION_TYPE_BUFFER,
8946 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8947 allocator->m_hDevice,
8949 (*pAllocation)->GetMemory(),
8950 (*pAllocation)->GetOffset());
8954 if(pAllocationInfo != VMA_NULL)
8956 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8960 allocator->FreeMemory(*pAllocation);
8961 *pAllocation = VK_NULL_HANDLE;
8962 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8963 *pBuffer = VK_NULL_HANDLE;
8966 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8967 *pBuffer = VK_NULL_HANDLE;
// Fragment of vmaDestroyBuffer(...)
8974 VmaAllocator allocator,
8976 VmaAllocation allocation)
8978 if(buffer != VK_NULL_HANDLE)
8980 VMA_ASSERT(allocator);
8982 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8984 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8986 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8988 allocator->FreeMemory(allocation);
// Fragment of vmaCreateImage(...) — mirrors vmaCreateBuffer; suballocation
// type depends on the image tiling.
8993 VmaAllocator allocator,
8994 const VkImageCreateInfo* pImageCreateInfo,
8997 VmaAllocation* pAllocation,
9000 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
9002 VMA_DEBUG_LOG(
"vmaCreateImage");
9004 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9006 *pImage = VK_NULL_HANDLE;
9007 *pAllocation = VK_NULL_HANDLE;
9010 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9011 allocator->m_hDevice,
9013 allocator->GetAllocationCallbacks(),
9017 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9018 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9019 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9022 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9026 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
9027 allocator->m_hDevice,
9029 (*pAllocation)->GetMemory(),
9030 (*pAllocation)->GetOffset());
9034 if(pAllocationInfo != VMA_NULL)
9036 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9040 allocator->FreeMemory(*pAllocation);
9041 *pAllocation = VK_NULL_HANDLE;
9042 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9043 *pImage = VK_NULL_HANDLE;
9046 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9047 *pImage = VK_NULL_HANDLE;
// Fragment of vmaDestroyImage(...)
9054 VmaAllocator allocator,
9056 VmaAllocation allocation)
9058 if(image != VK_NULL_HANDLE)
9060 VMA_ASSERT(allocator);
9062 VMA_DEBUG_LOG(
"vmaDestroyImage");
9064 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9066 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9068 allocator->FreeMemory(allocation);
9072 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1013
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1267
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1038
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1023
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1224
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1017
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1573
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1035
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1772
Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool.
Definition: vk_mem_alloc.h:1443
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1497
Definition: vk_mem_alloc.h:1304
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1006
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1342
Definition: vk_mem_alloc.h:1251
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1047
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1100
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1032
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1255
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1165
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1020
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1164
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1028
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1776
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1064
VmaStatInfo total
Definition: vk_mem_alloc.h:1174
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1784
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1326
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1767
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1021
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:948
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1041
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1451
Definition: vk_mem_alloc.h:1445
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1583
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1018
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1363
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1467
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1503
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1004
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1454
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1202
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1762
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1780
Definition: vk_mem_alloc.h:1241
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1350
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1019
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1170
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:954
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:975
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:980
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1782
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1337
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1513
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1014
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1153
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1462
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:967
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1311
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1166
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:971
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1457
Definition: vk_mem_alloc.h:1250
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1332
Definition: vk_mem_alloc.h:1323
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1156
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1016
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1475
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1050
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1506
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1321
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1356
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1088
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1172
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1291
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1165
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1025
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:969
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1024
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1489
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
Definition: vk_mem_alloc.h:1597
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1044
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1165
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1162
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1494
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1578
Definition: vk_mem_alloc.h:1319
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1778
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1012
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1027
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1160
Definition: vk_mem_alloc.h:1207
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1447
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1158
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1022
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1026
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such blocks.
Definition: vk_mem_alloc.h:1278
Definition: vk_mem_alloc.h:1234
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1592
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1002
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1015
Parameters of VmaAllocation objects that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1559
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1425
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1166
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1317
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1173
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1500
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1166
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1564