23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 827 #include <vulkan/vulkan.h> 829 VK_DEFINE_HANDLE(VmaAllocator)
833 VmaAllocator allocator,
835 VkDeviceMemory memory,
839 VmaAllocator allocator,
841 VkDeviceMemory memory,
990 VmaAllocator* pAllocator);
994 VmaAllocator allocator);
1001 VmaAllocator allocator,
1002 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1009 VmaAllocator allocator,
1010 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1019 VmaAllocator allocator,
1020 uint32_t memoryTypeIndex,
1021 VkMemoryPropertyFlags* pFlags);
1032 VmaAllocator allocator,
1033 uint32_t frameIndex);
1063 VmaAllocator allocator,
1066 #define VMA_STATS_STRING_ENABLED 1 1068 #if VMA_STATS_STRING_ENABLED 1074 VmaAllocator allocator,
1075 char** ppStatsString,
1076 VkBool32 detailedMap);
1079 VmaAllocator allocator,
1080 char* pStatsString);
1082 #endif // #if VMA_STATS_STRING_ENABLED 1084 VK_DEFINE_HANDLE(VmaPool)
1267 VmaAllocator allocator,
1268 uint32_t memoryTypeBits,
1270 uint32_t* pMemoryTypeIndex);
1285 VmaAllocator allocator,
1286 const VkBufferCreateInfo* pBufferCreateInfo,
1288 uint32_t* pMemoryTypeIndex);
1303 VmaAllocator allocator,
1304 const VkImageCreateInfo* pImageCreateInfo,
1306 uint32_t* pMemoryTypeIndex);
1407 VmaAllocator allocator,
1414 VmaAllocator allocator,
1424 VmaAllocator allocator,
1435 VmaAllocator allocator,
1437 size_t* pLostAllocationCount);
1439 VK_DEFINE_HANDLE(VmaAllocation)
1495 VmaAllocator allocator,
1496 const VkMemoryRequirements* pVkMemoryRequirements,
1498 VmaAllocation* pAllocation,
1508 VmaAllocator allocator,
1511 VmaAllocation* pAllocation,
1516 VmaAllocator allocator,
1519 VmaAllocation* pAllocation,
1524 VmaAllocator allocator,
1525 VmaAllocation allocation);
1532 VmaAllocator allocator,
1533 VmaAllocation allocation,
1539 VmaAllocator allocator,
1540 VmaAllocation allocation);
1556 VmaAllocator allocator,
1557 VmaAllocation allocation,
1571 VmaAllocator allocator,
1572 VmaAllocation* pAllocation);
1609 VmaAllocator allocator,
1610 VmaAllocation allocation,
1618 VmaAllocator allocator,
1619 VmaAllocation allocation);
1730 VmaAllocator allocator,
1731 VmaAllocation* pAllocations,
1732 size_t allocationCount,
1733 VkBool32* pAllocationsChanged,
1764 VmaAllocator allocator,
1765 const VkBufferCreateInfo* pBufferCreateInfo,
1768 VmaAllocation* pAllocation,
1783 VmaAllocator allocator,
1785 VmaAllocation allocation);
1789 VmaAllocator allocator,
1790 const VkImageCreateInfo* pImageCreateInfo,
1793 VmaAllocation* pAllocation,
1808 VmaAllocator allocator,
1810 VmaAllocation allocation);
1816 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1819 #ifdef __INTELLISENSE__ 1820 #define VMA_IMPLEMENTATION 1823 #ifdef VMA_IMPLEMENTATION 1824 #undef VMA_IMPLEMENTATION 1846 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1847 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1859 #if VMA_USE_STL_CONTAINERS 1860 #define VMA_USE_STL_VECTOR 1 1861 #define VMA_USE_STL_UNORDERED_MAP 1 1862 #define VMA_USE_STL_LIST 1 1865 #if VMA_USE_STL_VECTOR 1869 #if VMA_USE_STL_UNORDERED_MAP 1870 #include <unordered_map> 1873 #if VMA_USE_STL_LIST 1882 #include <algorithm> 1886 #if !defined(_WIN32) && !defined(__APPLE__) 1892 #define VMA_NULL nullptr 1895 #if defined(__APPLE__) || defined(__ANDROID__) 1897 void *aligned_alloc(
size_t alignment,
size_t size)
1900 if(alignment <
sizeof(
void*))
1902 alignment =
sizeof(
void*);
1906 if(posix_memalign(&pointer, alignment, size) == 0)
1915 #define VMA_ASSERT(expr) assert(expr) 1917 #define VMA_ASSERT(expr) 1923 #ifndef VMA_HEAVY_ASSERT 1925 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1927 #define VMA_HEAVY_ASSERT(expr) 1931 #ifndef VMA_ALIGN_OF 1932 #define VMA_ALIGN_OF(type) (__alignof(type)) 1935 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1937 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1939 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1943 #ifndef VMA_SYSTEM_FREE 1945 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1947 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1952 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1956 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1960 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1964 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1967 #ifndef VMA_DEBUG_LOG 1968 #define VMA_DEBUG_LOG(format, ...) 1978 #if VMA_STATS_STRING_ENABLED 1979 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1981 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1983 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1985 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1987 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1989 snprintf(outStr, strLen,
"%p", ptr);
1999 void Lock() { m_Mutex.lock(); }
2000 void Unlock() { m_Mutex.unlock(); }
2004 #define VMA_MUTEX VmaMutex 2015 #ifndef VMA_ATOMIC_UINT32 2016 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2019 #ifndef VMA_BEST_FIT 2032 #define VMA_BEST_FIT (1) 2035 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2040 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2043 #ifndef VMA_DEBUG_ALIGNMENT 2048 #define VMA_DEBUG_ALIGNMENT (1) 2051 #ifndef VMA_DEBUG_MARGIN 2056 #define VMA_DEBUG_MARGIN (0) 2059 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2064 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2067 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2072 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2075 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2076 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2080 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2081 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2085 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// A VkAllocationCallbacks struct with every function pointer set to null.
// NOTE(review): presumably substituted when the user provides no custom CPU
// allocation callbacks, so code can always dereference a callbacks struct —
// confirm against the VmaAllocator construction path.
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in the 32-bit value (population count).
// Classic SWAR algorithm: sums bit counts in progressively wider fields.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555); // 2-bit sums
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333); // 4-bit sums
    c = ((c >> 4) + c) & 0x0F0F0F0F; // 8-bit sums
    c = ((c >> 8) + c) & 0x00FF00FF; // 16-bit sums
    c = ((c >> 16) + c) & 0x0000FFFF; // final 32-bit sum
    return c; // Restored: visible text fell off the end of a non-void function (UB).
}
// Rounds val up to the nearest multiple of align.
// align must be nonzero; for the mask-free formula below it need not be a
// power of two. T is expected to be an unsigned integer type (VkDeviceSize etc.).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Division of x by y with the result rounded to nearest (ties round up for
// non-negative operands). Intended for unsigned integer types.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
/*
Lomuto-style partition of [beg, end) around the last element as pivot.
After the call, every element for which cmp(elem, pivot) is true precedes the
pivot's final position. Returns an iterator to the pivot's final position.
Precondition: the range is non-empty.
*/
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // Pivot: last element.
    Iterator insertIndex = beg; // Boundary of the "less than pivot" prefix.
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex; // Restored: without this the boundary never advances.
        }
    }
    // Move the pivot into its final slot.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex; // Restored: function must return the pivot position.
}
2145 template<
typename Iterator,
typename Compare>
2146 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2150 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2151 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2152 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2156 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2158 #endif // #ifndef VMA_SORT 2167 static inline bool VmaBlocksOnSamePage(
2168 VkDeviceSize resourceAOffset,
2169 VkDeviceSize resourceASize,
2170 VkDeviceSize resourceBOffset,
2171 VkDeviceSize pageSize)
2173 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2174 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2175 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2176 VkDeviceSize resourceBStart = resourceBOffset;
2177 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2178 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation within a device memory block.
// The numeric order is significant: VmaIsBufferImageGranularityConflict
// normalizes its two arguments so the smaller value comes first.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unoccupied space.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Resource of unknown kind.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // Forces 32-bit underlying type.
};
2198 static inline bool VmaIsBufferImageGranularityConflict(
2199 VmaSuballocationType suballocType1,
2200 VmaSuballocationType suballocType2)
2202 if(suballocType1 > suballocType2)
2204 VMA_SWAP(suballocType1, suballocType2);
2207 switch(suballocType1)
2209 case VMA_SUBALLOCATION_TYPE_FREE:
2211 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2213 case VMA_SUBALLOCATION_TYPE_BUFFER:
2215 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2216 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2217 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2219 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2220 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2221 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2222 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2224 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2225 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2237 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2238 m_pMutex(useMutex ? &mutex : VMA_NULL)
2255 VMA_MUTEX* m_pMutex;
2258 #if VMA_DEBUG_GLOBAL_MUTEX 2259 static VMA_MUTEX gDebugGlobalMutex;
2260 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2262 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2266 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is NOT less than key (per cmp), or end if all elements are
less. Equivalent to std::lower_bound with a custom comparator.
The extraction had dropped the loop/else/return; restored below.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // Element is less than key: answer lies above mid.
        }
        else
        {
            up = mid; // mid itself may be the answer.
        }
    }
    return beg + down;
}
2299 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2301 if((pAllocationCallbacks != VMA_NULL) &&
2302 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2304 return (*pAllocationCallbacks->pfnAllocation)(
2305 pAllocationCallbacks->pUserData,
2308 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2312 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2316 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2318 if((pAllocationCallbacks != VMA_NULL) &&
2319 (pAllocationCallbacks->pfnFree != VMA_NULL))
2321 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2325 VMA_SYSTEM_FREE(ptr);
2329 template<
typename T>
2330 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2332 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2335 template<
typename T>
2336 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2338 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2341 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2343 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2345 template<
typename T>
2346 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2349 VmaFree(pAllocationCallbacks, ptr);
2352 template<
typename T>
2353 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2357 for(
size_t i = count; i--; )
2361 VmaFree(pAllocationCallbacks, ptr);
2366 template<
typename T>
2367 class VmaStlAllocator
2370 const VkAllocationCallbacks*
const m_pCallbacks;
2371 typedef T value_type;
2373 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2374 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2376 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2377 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2379 template<
typename U>
2380 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2382 return m_pCallbacks == rhs.m_pCallbacks;
2384 template<
typename U>
2385 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2387 return m_pCallbacks != rhs.m_pCallbacks;
2390 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2393 #if VMA_USE_STL_VECTOR 2395 #define VmaVector std::vector 2397 template<
typename T,
typename allocatorT>
2398 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2400 vec.insert(vec.begin() + index, item);
2403 template<
typename T,
typename allocatorT>
2404 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2406 vec.erase(vec.begin() + index);
2409 #else // #if VMA_USE_STL_VECTOR 2414 template<
typename T,
typename AllocatorT>
2418 typedef T value_type;
2420 VmaVector(
const AllocatorT& allocator) :
2421 m_Allocator(allocator),
2428 VmaVector(
size_t count,
const AllocatorT& allocator) :
2429 m_Allocator(allocator),
2430 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2436 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2437 m_Allocator(src.m_Allocator),
2438 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2439 m_Count(src.m_Count),
2440 m_Capacity(src.m_Count)
2444 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2450 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2453 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2457 resize(rhs.m_Count);
2460 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2466 bool empty()
const {
return m_Count == 0; }
2467 size_t size()
const {
return m_Count; }
2468 T* data() {
return m_pArray; }
2469 const T* data()
const {
return m_pArray; }
2471 T& operator[](
size_t index)
2473 VMA_HEAVY_ASSERT(index < m_Count);
2474 return m_pArray[index];
2476 const T& operator[](
size_t index)
const 2478 VMA_HEAVY_ASSERT(index < m_Count);
2479 return m_pArray[index];
2484 VMA_HEAVY_ASSERT(m_Count > 0);
2487 const T& front()
const 2489 VMA_HEAVY_ASSERT(m_Count > 0);
2494 VMA_HEAVY_ASSERT(m_Count > 0);
2495 return m_pArray[m_Count - 1];
2497 const T& back()
const 2499 VMA_HEAVY_ASSERT(m_Count > 0);
2500 return m_pArray[m_Count - 1];
2503 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2505 newCapacity = VMA_MAX(newCapacity, m_Count);
2507 if((newCapacity < m_Capacity) && !freeMemory)
2509 newCapacity = m_Capacity;
2512 if(newCapacity != m_Capacity)
2514 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2517 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2519 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2520 m_Capacity = newCapacity;
2521 m_pArray = newArray;
2525 void resize(
size_t newCount,
bool freeMemory =
false)
2527 size_t newCapacity = m_Capacity;
2528 if(newCount > m_Capacity)
2530 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2534 newCapacity = newCount;
2537 if(newCapacity != m_Capacity)
2539 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2540 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2541 if(elementsToCopy != 0)
2543 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2545 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2546 m_Capacity = newCapacity;
2547 m_pArray = newArray;
2553 void clear(
bool freeMemory =
false)
2555 resize(0, freeMemory);
2558 void insert(
size_t index,
const T& src)
2560 VMA_HEAVY_ASSERT(index <= m_Count);
2561 const size_t oldCount = size();
2562 resize(oldCount + 1);
2563 if(index < oldCount)
2565 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2567 m_pArray[index] = src;
2570 void remove(
size_t index)
2572 VMA_HEAVY_ASSERT(index < m_Count);
2573 const size_t oldCount = size();
2574 if(index < oldCount - 1)
2576 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2578 resize(oldCount - 1);
2581 void push_back(
const T& src)
2583 const size_t newIndex = size();
2584 resize(newIndex + 1);
2585 m_pArray[newIndex] = src;
2590 VMA_HEAVY_ASSERT(m_Count > 0);
2594 void push_front(
const T& src)
2601 VMA_HEAVY_ASSERT(m_Count > 0);
2605 typedef T* iterator;
2607 iterator begin() {
return m_pArray; }
2608 iterator end() {
return m_pArray + m_Count; }
2611 AllocatorT m_Allocator;
2617 template<
typename T,
typename allocatorT>
2618 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2620 vec.insert(index, item);
2623 template<
typename T,
typename allocatorT>
2624 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2629 #endif // #if VMA_USE_STL_VECTOR 2631 template<
typename CmpLess,
typename VectorT>
2632 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2634 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2636 vector.data() + vector.size(),
2638 CmpLess()) - vector.data();
2639 VmaVectorInsert(vector, indexToInsert, value);
2640 return indexToInsert;
2643 template<
typename CmpLess,
typename VectorT>
2644 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2647 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2652 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2654 size_t indexToRemove = it - vector.begin();
2655 VmaVectorRemove(vector, indexToRemove);
2661 template<
typename CmpLess,
typename VectorT>
2662 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2665 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2667 vector.data() + vector.size(),
2670 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2672 return it - vector.begin();
2676 return vector.size();
2688 template<
typename T>
2689 class VmaPoolAllocator
2692 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2693 ~VmaPoolAllocator();
2701 uint32_t NextFreeIndex;
2708 uint32_t FirstFreeIndex;
2711 const VkAllocationCallbacks* m_pAllocationCallbacks;
2712 size_t m_ItemsPerBlock;
2713 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2715 ItemBlock& CreateNewBlock();
2718 template<
typename T>
2719 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2720 m_pAllocationCallbacks(pAllocationCallbacks),
2721 m_ItemsPerBlock(itemsPerBlock),
2722 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2724 VMA_ASSERT(itemsPerBlock > 0);
2727 template<
typename T>
2728 VmaPoolAllocator<T>::~VmaPoolAllocator()
2733 template<
typename T>
2734 void VmaPoolAllocator<T>::Clear()
2736 for(
size_t i = m_ItemBlocks.size(); i--; )
2737 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2738 m_ItemBlocks.clear();
2741 template<
typename T>
2742 T* VmaPoolAllocator<T>::Alloc()
2744 for(
size_t i = m_ItemBlocks.size(); i--; )
2746 ItemBlock& block = m_ItemBlocks[i];
2748 if(block.FirstFreeIndex != UINT32_MAX)
2750 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2751 block.FirstFreeIndex = pItem->NextFreeIndex;
2752 return &pItem->Value;
2757 ItemBlock& newBlock = CreateNewBlock();
2758 Item*
const pItem = &newBlock.pItems[0];
2759 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2760 return &pItem->Value;
2763 template<
typename T>
2764 void VmaPoolAllocator<T>::Free(T* ptr)
2767 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2769 ItemBlock& block = m_ItemBlocks[i];
2773 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2776 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2778 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2779 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2780 block.FirstFreeIndex = index;
2784 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2787 template<
typename T>
2788 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2790 ItemBlock newBlock = {
2791 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2793 m_ItemBlocks.push_back(newBlock);
2796 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2797 newBlock.pItems[i].NextFreeIndex = i + 1;
2798 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2799 return m_ItemBlocks.back();
2805 #if VMA_USE_STL_LIST 2807 #define VmaList std::list 2809 #else // #if VMA_USE_STL_LIST 2811 template<
typename T>
2820 template<
typename T>
2824 typedef VmaListItem<T> ItemType;
2826 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2830 size_t GetCount()
const {
return m_Count; }
2831 bool IsEmpty()
const {
return m_Count == 0; }
2833 ItemType* Front() {
return m_pFront; }
2834 const ItemType* Front()
const {
return m_pFront; }
2835 ItemType* Back() {
return m_pBack; }
2836 const ItemType* Back()
const {
return m_pBack; }
2838 ItemType* PushBack();
2839 ItemType* PushFront();
2840 ItemType* PushBack(
const T& value);
2841 ItemType* PushFront(
const T& value);
2846 ItemType* InsertBefore(ItemType* pItem);
2848 ItemType* InsertAfter(ItemType* pItem);
2850 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2851 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2853 void Remove(ItemType* pItem);
2856 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2857 VmaPoolAllocator<ItemType> m_ItemAllocator;
2863 VmaRawList(
const VmaRawList<T>& src);
2864 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2867 template<
typename T>
2868 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2869 m_pAllocationCallbacks(pAllocationCallbacks),
2870 m_ItemAllocator(pAllocationCallbacks, 128),
2877 template<
typename T>
2878 VmaRawList<T>::~VmaRawList()
2884 template<
typename T>
2885 void VmaRawList<T>::Clear()
2887 if(IsEmpty() ==
false)
2889 ItemType* pItem = m_pBack;
2890 while(pItem != VMA_NULL)
2892 ItemType*
const pPrevItem = pItem->pPrev;
2893 m_ItemAllocator.Free(pItem);
2896 m_pFront = VMA_NULL;
2902 template<
typename T>
2903 VmaListItem<T>* VmaRawList<T>::PushBack()
2905 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2906 pNewItem->pNext = VMA_NULL;
2909 pNewItem->pPrev = VMA_NULL;
2910 m_pFront = pNewItem;
2916 pNewItem->pPrev = m_pBack;
2917 m_pBack->pNext = pNewItem;
2924 template<
typename T>
2925 VmaListItem<T>* VmaRawList<T>::PushFront()
2927 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2928 pNewItem->pPrev = VMA_NULL;
2931 pNewItem->pNext = VMA_NULL;
2932 m_pFront = pNewItem;
2938 pNewItem->pNext = m_pFront;
2939 m_pFront->pPrev = pNewItem;
2940 m_pFront = pNewItem;
2946 template<
typename T>
2947 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2949 ItemType*
const pNewItem = PushBack();
2950 pNewItem->Value = value;
2954 template<
typename T>
2955 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2957 ItemType*
const pNewItem = PushFront();
2958 pNewItem->Value = value;
2962 template<
typename T>
2963 void VmaRawList<T>::PopBack()
2965 VMA_HEAVY_ASSERT(m_Count > 0);
2966 ItemType*
const pBackItem = m_pBack;
2967 ItemType*
const pPrevItem = pBackItem->pPrev;
2968 if(pPrevItem != VMA_NULL)
2970 pPrevItem->pNext = VMA_NULL;
2972 m_pBack = pPrevItem;
2973 m_ItemAllocator.Free(pBackItem);
2977 template<
typename T>
2978 void VmaRawList<T>::PopFront()
2980 VMA_HEAVY_ASSERT(m_Count > 0);
2981 ItemType*
const pFrontItem = m_pFront;
2982 ItemType*
const pNextItem = pFrontItem->pNext;
2983 if(pNextItem != VMA_NULL)
2985 pNextItem->pPrev = VMA_NULL;
2987 m_pFront = pNextItem;
2988 m_ItemAllocator.Free(pFrontItem);
2992 template<
typename T>
2993 void VmaRawList<T>::Remove(ItemType* pItem)
2995 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2996 VMA_HEAVY_ASSERT(m_Count > 0);
2998 if(pItem->pPrev != VMA_NULL)
3000 pItem->pPrev->pNext = pItem->pNext;
3004 VMA_HEAVY_ASSERT(m_pFront == pItem);
3005 m_pFront = pItem->pNext;
3008 if(pItem->pNext != VMA_NULL)
3010 pItem->pNext->pPrev = pItem->pPrev;
3014 VMA_HEAVY_ASSERT(m_pBack == pItem);
3015 m_pBack = pItem->pPrev;
3018 m_ItemAllocator.Free(pItem);
3022 template<
typename T>
3023 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3025 if(pItem != VMA_NULL)
3027 ItemType*
const prevItem = pItem->pPrev;
3028 ItemType*
const newItem = m_ItemAllocator.Alloc();
3029 newItem->pPrev = prevItem;
3030 newItem->pNext = pItem;
3031 pItem->pPrev = newItem;
3032 if(prevItem != VMA_NULL)
3034 prevItem->pNext = newItem;
3038 VMA_HEAVY_ASSERT(m_pFront == pItem);
3048 template<
typename T>
3049 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3051 if(pItem != VMA_NULL)
3053 ItemType*
const nextItem = pItem->pNext;
3054 ItemType*
const newItem = m_ItemAllocator.Alloc();
3055 newItem->pNext = nextItem;
3056 newItem->pPrev = pItem;
3057 pItem->pNext = newItem;
3058 if(nextItem != VMA_NULL)
3060 nextItem->pPrev = newItem;
3064 VMA_HEAVY_ASSERT(m_pBack == pItem);
3074 template<
typename T>
3075 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3077 ItemType*
const newItem = InsertBefore(pItem);
3078 newItem->Value = value;
3082 template<
typename T>
3083 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3085 ItemType*
const newItem = InsertAfter(pItem);
3086 newItem->Value = value;
3090 template<
typename T,
typename AllocatorT>
3103 T& operator*()
const 3105 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3106 return m_pItem->Value;
3108 T* operator->()
const 3110 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3111 return &m_pItem->Value;
3114 iterator& operator++()
3116 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3117 m_pItem = m_pItem->pNext;
3120 iterator& operator--()
3122 if(m_pItem != VMA_NULL)
3124 m_pItem = m_pItem->pPrev;
3128 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3129 m_pItem = m_pList->Back();
3134 iterator operator++(
int)
3136 iterator result = *
this;
3140 iterator operator--(
int)
3142 iterator result = *
this;
3147 bool operator==(
const iterator& rhs)
const 3149 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3150 return m_pItem == rhs.m_pItem;
3152 bool operator!=(
const iterator& rhs)
const 3154 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3155 return m_pItem != rhs.m_pItem;
3159 VmaRawList<T>* m_pList;
3160 VmaListItem<T>* m_pItem;
3162 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3168 friend class VmaList<T, AllocatorT>;
3171 class const_iterator
3180 const_iterator(
const iterator& src) :
3181 m_pList(src.m_pList),
3182 m_pItem(src.m_pItem)
3186 const T& operator*()
const 3188 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3189 return m_pItem->Value;
3191 const T* operator->()
const 3193 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3194 return &m_pItem->Value;
3197 const_iterator& operator++()
3199 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3200 m_pItem = m_pItem->pNext;
3203 const_iterator& operator--()
3205 if(m_pItem != VMA_NULL)
3207 m_pItem = m_pItem->pPrev;
3211 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3212 m_pItem = m_pList->Back();
3217 const_iterator operator++(
int)
3219 const_iterator result = *
this;
3223 const_iterator operator--(
int)
3225 const_iterator result = *
this;
3230 bool operator==(
const const_iterator& rhs)
const 3232 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3233 return m_pItem == rhs.m_pItem;
3235 bool operator!=(
const const_iterator& rhs)
const 3237 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3238 return m_pItem != rhs.m_pItem;
3242 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3248 const VmaRawList<T>* m_pList;
3249 const VmaListItem<T>* m_pItem;
3251 friend class VmaList<T, AllocatorT>;
3254 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3256 bool empty()
const {
return m_RawList.IsEmpty(); }
3257 size_t size()
const {
return m_RawList.GetCount(); }
3259 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3260 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3262 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3263 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3265 void clear() { m_RawList.Clear(); }
3266 void push_back(
const T& value) { m_RawList.PushBack(value); }
3267 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3268 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3271 VmaRawList<T> m_RawList;
3274 #endif // #if VMA_USE_STL_LIST 3282 #if VMA_USE_STL_UNORDERED_MAP 3284 #define VmaPair std::pair 3286 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3287 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3289 #else // #if VMA_USE_STL_UNORDERED_MAP 3291 template<
typename T1,
typename T2>
3297 VmaPair() : first(), second() { }
3298 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3304 template<
typename KeyT,
typename ValueT>
3308 typedef VmaPair<KeyT, ValueT> PairType;
3309 typedef PairType* iterator;
3311 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3313 iterator begin() {
return m_Vector.begin(); }
3314 iterator end() {
return m_Vector.end(); }
3316 void insert(
const PairType& pair);
3317 iterator find(
const KeyT& key);
3318 void erase(iterator it);
3321 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3324 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3326 template<
typename FirstT,
typename SecondT>
3327 struct VmaPairFirstLess
3329 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3331 return lhs.first < rhs.first;
3333 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3335 return lhs.first < rhsFirst;
3339 template<
typename KeyT,
typename ValueT>
3340 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3342 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3344 m_Vector.data() + m_Vector.size(),
3346 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3347 VmaVectorInsert(m_Vector, indexToInsert, pair);
3350 template<
typename KeyT,
typename ValueT>
3351 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3353 PairType* it = VmaBinaryFindFirstNotLess(
3355 m_Vector.data() + m_Vector.size(),
3357 VmaPairFirstLess<KeyT, ValueT>());
3358 if((it != m_Vector.end()) && (it->first == key))
3364 return m_Vector.end();
3368 template<
typename KeyT,
typename ValueT>
3369 void VmaMap<KeyT, ValueT>::erase(iterator it)
3371 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3374 #endif // #if VMA_USE_STL_UNORDERED_MAP 3380 class VmaDeviceMemoryBlock;
// VmaAllocation_T: the implementation behind the opaque VmaAllocation handle.
// An allocation is either a suballocation inside a VmaDeviceMemoryBlock
// (ALLOCATION_TYPE_BLOCK) or its own VkDeviceMemory (ALLOCATION_TYPE_DEDICATED);
// the two cases live in the BlockAllocation / DedicatedAllocation structs
// below (presumably members of a union in the original — the union keyword is
// not visible in this extract; verify upstream).
// NOTE(review): extraction artifact — original line numbers embedded, braces
// and some statement lines dropped throughout this struct.
3382 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation; the low
// 7 bits are the user map reference count (see BlockAllocMap below).
3385 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
// Set when pUserData is treated as an owned, heap-copied string.
3389 FLAG_USER_DATA_STRING = 0x01,
3393 enum ALLOCATION_TYPE
3395 ALLOCATION_TYPE_NONE,
3396 ALLOCATION_TYPE_BLOCK,
3397 ALLOCATION_TYPE_DEDICATED,
// Constructor: starts as TYPE_NONE; the Init* methods below complete setup.
3400 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3403 m_pUserData(VMA_NULL),
3404 m_LastUseFrameIndex(currentFrameIndex),
3405 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3406 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3408 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
// Destructor sanity checks: all user mappings released, user data freed.
3414 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3417 VMA_ASSERT(m_pUserData == VMA_NULL);
// Turn a TYPE_NONE allocation into a block suballocation.
3420 void InitBlockAllocation(
3422 VmaDeviceMemoryBlock* block,
3423 VkDeviceSize offset,
3424 VkDeviceSize alignment,
3426 VmaSuballocationType suballocationType,
3430 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3431 VMA_ASSERT(block != VMA_NULL);
3432 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3433 m_Alignment = alignment;
3435 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3436 m_SuballocationType = (uint8_t)suballocationType;
3437 m_BlockAllocation.m_hPool = hPool;
3438 m_BlockAllocation.m_Block = block;
3439 m_BlockAllocation.m_Offset = offset;
3440 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Init of a "lost" placeholder allocation: no real block/pool, frame index
// must already be VMA_FRAME_INDEX_LOST (asserted below). The method header
// (presumably InitLost) was dropped by this extract — verify upstream.
3445 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3446 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3447 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3448 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3449 m_BlockAllocation.m_Block = VMA_NULL;
3450 m_BlockAllocation.m_Offset = 0;
3451 m_BlockAllocation.m_CanBecomeLost =
true;
// Move this allocation to another block (used by defragmentation).
3454 void ChangeBlockAllocation(
3455 VmaAllocator hAllocator,
3456 VmaDeviceMemoryBlock* block,
3457 VkDeviceSize offset);
// Turn a TYPE_NONE allocation into a dedicated VkDeviceMemory allocation;
// a non-null pMappedData marks it persistently mapped.
3460 void InitDedicatedAllocation(
3461 uint32_t memoryTypeIndex,
3462 VkDeviceMemory hMemory,
3463 VmaSuballocationType suballocationType,
3467 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3468 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3469 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3472 m_SuballocationType = (uint8_t)suballocationType;
3473 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3474 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3475 m_DedicatedAllocation.m_hMemory = hMemory;
3476 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors.
3479 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3480 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3481 VkDeviceSize GetSize()
const {
return m_Size; }
3482 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3483 void* GetUserData()
const {
return m_pUserData; }
3484 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3485 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
3487 VmaDeviceMemoryBlock* GetBlock()
const 3489 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3490 return m_BlockAllocation.m_Block;
// Type-dispatching accessors, defined out-of-line further below.
3492 VkDeviceSize GetOffset()
const;
3493 VkDeviceMemory GetMemory()
const;
3494 uint32_t GetMemoryTypeIndex()
const;
3495 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3496 void* GetMappedData()
const;
3497 bool CanBecomeLost()
const;
3498 VmaPool GetPool()
const;
// Lost-allocation bookkeeping: last-use frame index is atomic so MakeLost
// can race with users touching the allocation.
3500 uint32_t GetLastUseFrameIndex()
const 3502 return m_LastUseFrameIndex.load();
3504 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3506 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3516 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Stats for a dedicated allocation (body lines dropped by this extract).
3518 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3520 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap entry points, split per allocation type (defined below).
3531 void BlockAllocMap();
3532 void BlockAllocUnmap();
3533 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3534 void DedicatedAllocUnmap(VmaAllocator hAllocator);
// Data members (m_Type, m_Flags, m_MapCount, m_pUserData lines appear
// dropped by this extract — they are referenced above; verify upstream).
3537 VkDeviceSize m_Alignment;
3538 VkDeviceSize m_Size;
3540 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3542 uint8_t m_SuballocationType;
// State of a block suballocation.
3549 struct BlockAllocation
3552 VmaDeviceMemoryBlock* m_Block;
3553 VkDeviceSize m_Offset;
3554 bool m_CanBecomeLost;
// State of a dedicated allocation.
3558 struct DedicatedAllocation
3560 uint32_t m_MemoryTypeIndex;
3561 VkDeviceMemory m_hMemory;
3562 void* m_pMappedData;
3568 BlockAllocation m_BlockAllocation;
3570 DedicatedAllocation m_DedicatedAllocation;
// Frees the heap copy of the user-data string (FLAG_USER_DATA_STRING).
3573 void FreeUserDataString(VmaAllocator hAllocator);
// VmaSuballocation: one region (free or used) inside a memory block's
// metadata. NOTE(review): the 'size' member (original line ~3583) appears
// dropped by this extract — later code reads suballoc.size; verify upstream.
3580 struct VmaSuballocation
3582 VkDeviceSize offset;
3584 VmaAllocation hAllocation;
3585 VmaSuballocationType type;
// Suballocations are kept in an offset-ordered doubly-linked list.
3588 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost penalty (in bytes) charged per allocation that must be made lost to
// satisfy a request — see VmaAllocationRequest::CalcCost below.
3591 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// VmaAllocationRequest: candidate placement for a new suballocation, produced
// by VmaBlockMetadata::CreateAllocationRequest. 'item' points at the
// suballocation where the new allocation would start; itemsToMakeLostCount
// is how many existing allocations must be sacrificed ("made lost") first.
3606 struct VmaAllocationRequest
3608 VkDeviceSize offset;
3609 VkDeviceSize sumFreeSize;
3610 VkDeviceSize sumItemSize;
3611 VmaSuballocationList::iterator item;
3612 size_t itemsToMakeLostCount;
// Lower cost = better candidate; each lost allocation adds a fixed penalty.
3614 VkDeviceSize CalcCost()
const 3616 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: bookkeeping for a single VkDeviceMemory block — an
// offset-ordered list of used/free suballocations plus a size-sorted index of
// the larger free ranges (m_FreeSuballocationsBySize) for best-fit search.
// NOTE(review): extraction artifact — braces and access-specifier lines
// dropped; original line numbers embedded.
3624 class VmaBlockMetadata
3627 VmaBlockMetadata(VmaAllocator hAllocator);
3628 ~VmaBlockMetadata();
// Resets metadata to a single free suballocation covering 'size' bytes.
3629 void Init(VkDeviceSize size);
// Full consistency check of both data structures (debug use).
3632 bool Validate()
const;
3633 VkDeviceSize GetSize()
const {
return m_Size; }
3634 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3635 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3636 VkDeviceSize GetUnusedRangeSizeMax()
const;
3638 bool IsEmpty()
const;
3640 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3643 #if VMA_STATS_STRING_ENABLED 3644 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3648 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Try to find space for an allocation; may propose making other allocations
// lost when canMakeOtherLost is set. Returns success via pAllocationRequest.
3653 bool CreateAllocationRequest(
3654 uint32_t currentFrameIndex,
3655 uint32_t frameInUseCount,
3656 VkDeviceSize bufferImageGranularity,
3657 VkDeviceSize allocSize,
3658 VkDeviceSize allocAlignment,
3659 VmaSuballocationType allocType,
3660 bool canMakeOtherLost,
3661 VmaAllocationRequest* pAllocationRequest);
// Actually sacrifice the allocations a request marked for loss.
3663 bool MakeRequestedAllocationsLost(
3664 uint32_t currentFrameIndex,
3665 uint32_t frameInUseCount,
3666 VmaAllocationRequest* pAllocationRequest);
3668 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Commit a request: carve the allocation out of the chosen free range.
// (The method name line — presumably Alloc — was dropped by this extract.)
3672 const VmaAllocationRequest& request,
3673 VmaSuballocationType type,
3674 VkDeviceSize allocSize,
3675 VmaAllocation hAllocation);
3678 void Free(
const VmaAllocation allocation);
3679 void FreeAtOffset(VkDeviceSize offset);
3682 VkDeviceSize m_Size;
3683 uint32_t m_FreeCount;
3684 VkDeviceSize m_SumFreeSize;
3685 VmaSuballocationList m_Suballocations;
// Free suballocations above a registration threshold, sorted by size
// ascending (see Validate below for the invariants).
3688 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3690 bool ValidateFreeSuballocationList()
const;
// Check whether an allocation fits at/after 'suballocItem', honoring
// alignment and bufferImageGranularity; outputs offset and lost-cost info.
3694 bool CheckAllocation(
3695 uint32_t currentFrameIndex,
3696 uint32_t frameInUseCount,
3697 VkDeviceSize bufferImageGranularity,
3698 VkDeviceSize allocSize,
3699 VkDeviceSize allocAlignment,
3700 VmaSuballocationType allocType,
3701 VmaSuballocationList::const_iterator suballocItem,
3702 bool canMakeOtherLost,
3703 VkDeviceSize* pOffset,
3704 size_t* itemsToMakeLostCount,
3705 VkDeviceSize* pSumFreeSize,
3706 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
3708 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3712 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3715 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3718 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaDeviceMemoryMapping: reference-counted vkMapMemory wrapper shared by all
// allocations in one block — the memory is mapped once and m_MapCount tracks
// how many users hold it. NOTE(review): a mutex member (original lines
// 3735-ish) may have been dropped by this extract; verify upstream.
3722 class VmaDeviceMemoryMapping
3725 VmaDeviceMemoryMapping();
3726 ~VmaDeviceMemoryMapping();
3728 void* GetMappedData()
const {
return m_pMappedData; }
// 'count' lets a caller add/remove several references at once.
3731 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3732 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3736 uint32_t m_MapCount;
3737 void* m_pMappedData;
// VmaDeviceMemoryBlock: one VkDeviceMemory plus its suballocation metadata
// and shared mapping state. Blocks are owned by a VmaBlockVector.
// NOTE(review): extraction artifact — braces/access specifiers dropped.
3746 class VmaDeviceMemoryBlock
3749 uint32_t m_MemoryTypeIndex;
3750 VkDeviceMemory m_hMemory;
3751 VmaDeviceMemoryMapping m_Mapping;
3752 VmaBlockMetadata m_Metadata;
3754 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destructor only asserts: Destroy() must have released the memory first.
3756 ~VmaDeviceMemoryBlock()
3758 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init takes ownership of an already-allocated VkDeviceMemory. (The method
// name line appears dropped by this extract — verify upstream.)
3763 uint32_t newMemoryTypeIndex,
3764 VkDeviceMemory newMemory,
3765 VkDeviceSize newSize);
3767 void Destroy(VmaAllocator allocator);
3770 bool Validate()
const;
// Ref-counted map/unmap, forwarded to m_Mapping with m_hMemory.
3773 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3774 void Unmap(VmaAllocator hAllocator, uint32_t count);
// VmaPointerLess: orders raw pointers (used to keep pointer vectors sorted).
// NOTE(review): the return statement (original ~3781, presumably
// 'return lhs < rhs;') was dropped by this extract; verify upstream.
3777 struct VmaPointerLess
3779 bool operator()(
const void* lhs,
const void* rhs)
const 3785 class VmaDefragmentator;
// VmaBlockVector: a growable sequence of VmaDeviceMemoryBlock for one memory
// type — used both for the allocator's default pools and for custom VmaPools
// (m_IsCustomPool). Owns block creation/destruction, allocation placement,
// and an optional defragmentator.
// NOTE(review): extraction artifact — the constructor name line, mutex
// member, and several declaration fragments were dropped; verify upstream.
3793 struct VmaBlockVector
// Constructor parameters (constructor name line dropped by extract).
3796 VmaAllocator hAllocator,
3797 uint32_t memoryTypeIndex,
3798 VkDeviceSize preferredBlockSize,
3799 size_t minBlockCount,
3800 size_t maxBlockCount,
3801 VkDeviceSize bufferImageGranularity,
3802 uint32_t frameInUseCount,
// Pre-creates m_MinBlockCount empty blocks.
3806 VkResult CreateMinBlocks();
3808 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3809 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3810 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3811 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3815 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate within existing blocks or create a new one (Allocate name line
// dropped by extract).
3818 VmaPool hCurrentPool,
3819 uint32_t currentFrameIndex,
3820 const VkMemoryRequirements& vkMemReq,
3822 VmaSuballocationType suballocType,
3823 VmaAllocation* pAllocation);
3826 VmaAllocation hAllocation);
3831 #if VMA_STATS_STRING_ENABLED 3832 void PrintDetailedMap(
class VmaJsonWriter& json);
3835 void MakePoolAllocationsLost(
3836 uint32_t currentFrameIndex,
3837 size_t* pLostAllocationCount);
// Lazily creates the defragmentator for this vector.
3839 VmaDefragmentator* EnsureDefragmentator(
3840 VmaAllocator hAllocator,
3841 uint32_t currentFrameIndex);
3843 VkResult Defragment(
3845 VkDeviceSize& maxBytesToMove,
3846 uint32_t& maxAllocationsToMove);
3848 void DestroyDefragmentator();
3851 friend class VmaDefragmentator;
// Immutable configuration.
3853 const VmaAllocator m_hAllocator;
3854 const uint32_t m_MemoryTypeIndex;
3855 const VkDeviceSize m_PreferredBlockSize;
3856 const size_t m_MinBlockCount;
3857 const size_t m_MaxBlockCount;
3858 const VkDeviceSize m_BufferImageGranularity;
3859 const uint32_t m_FrameInUseCount;
3860 const bool m_IsCustomPool;
3863 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while one fully-empty block is being kept around as a cache.
3867 bool m_HasEmptyBlock;
3868 VmaDefragmentator* m_pDefragmentator;
3870 size_t CalcMaxBlockSize()
const;
3873 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted (bubble step per call).
3877 void IncrementallySortBlocks();
3879 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Fragment of the custom-pool implementation type (presumably class VmaPool_T
// — its header line, original ~3883, was dropped by this extract; verify
// upstream). A pool is a thin wrapper around one VmaBlockVector.
3885 VmaBlockVector m_BlockVector;
// Constructor parameter fragment (constructor name line dropped).
3889 VmaAllocator hAllocator,
3893 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// VmaDefragmentator: moves allocations between blocks of one VmaBlockVector
// to compact memory, within maxBytesToMove / maxAllocationsToMove budgets.
// NOTE(review): extraction artifact — braces, return statements of the
// comparators, and some declarations dropped; verify upstream.
3895 #if VMA_STATS_STRING_ENABLED 3900 class VmaDefragmentator
3902 const VmaAllocator m_hAllocator;
3903 VmaBlockVector*
const m_pBlockVector;
3904 uint32_t m_CurrentFrameIndex;
// Progress counters reported via GetBytesMoved/GetAllocationsMoved.
3905 VkDeviceSize m_BytesMoved;
3906 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set when it
// actually moves.
3908 struct AllocationInfo
3910 VmaAllocation m_hAllocation;
3911 VkBool32* m_pChanged;
3914 m_hAllocation(VK_NULL_HANDLE),
3915 m_pChanged(VMA_NULL)
// Orders registered allocations largest-first.
3920 struct AllocationInfoSizeGreater
3922 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3924 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3929 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (struct BlockInfo — its header line appears
// dropped by this extract).
3933 VmaDeviceMemoryBlock* m_pBlock;
3934 bool m_HasNonMovableAllocations;
3935 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3937 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3939 m_HasNonMovableAllocations(true),
3940 m_Allocations(pAllocationCallbacks),
3941 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not every allocation in it was
// registered for defragmentation.
3945 void CalcHasNonMovableAllocations()
3947 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3948 const size_t defragmentAllocCount = m_Allocations.size();
3949 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (Sic: "Descecnding" typo is upstream's.)
3952 void SortAllocationsBySizeDescecnding()
3954 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3957 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3958 void Unmap(VmaAllocator hAllocator);
3962 void* m_pMappedDataForDefragmentation;
// Orders BlockInfo entries by their block pointer (for binary search).
3965 struct BlockPointerLess
3967 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3969 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3971 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3973 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination ordering: blocks with non-movable allocations first,
// then by free space (comparison continues past what this extract shows).
3979 struct BlockInfoCompareMoveDestination
3981 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3983 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3987 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3991 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3999 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4000 BlockInfoVector m_Blocks;
// One pass of moving allocations, bounded by the two budgets.
4002 VkResult DefragmentRound(
4003 VkDeviceSize maxBytesToMove,
4004 uint32_t maxAllocationsToMove);
4006 static bool MoveMakesSense(
4007 size_t dstBlockIndex, VkDeviceSize dstOffset,
4008 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Public interface (constructor name line dropped by extract).
4012 VmaAllocator hAllocator,
4013 VmaBlockVector* pBlockVector,
4014 uint32_t currentFrameIndex);
4016 ~VmaDefragmentator();
4018 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4019 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4021 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
4023 VkResult Defragment(
4024 VkDeviceSize maxBytesToMove,
4025 uint32_t maxAllocationsToMove);
// VmaAllocator_T: implementation behind the opaque VmaAllocator handle — the
// central object owning per-memory-type block vectors, dedicated-allocation
// lists (with per-type mutexes), custom pools, and cached device properties.
// NOTE(review): extraction artifact — several members visible only through
// their uses (m_hDevice, m_VulkanFunctions) and some declaration lines
// dropped; verify upstream.
4029 struct VmaAllocator_T
4032 bool m_UseKhrDedicatedAllocation;
4034 bool m_AllocationCallbacksSpecified;
4035 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size limits, guarded by their own mutex.
4039 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4040 VMA_MUTEX m_HeapSizeLimitMutex;
4042 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4043 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors, one per memory type.
4046 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each list with its mutex.
4049 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4050 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4051 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only when the user actually supplied them.
4056 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4058 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4062 return m_VulkanFunctions;
// Effective granularity: device limit, floored by the debug minimum.
4065 VkDeviceSize GetBufferImageGranularity()
const 4068 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4069 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4072 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4073 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4075 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4077 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4078 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Query memory requirements, including dedicated-allocation preference
// (KHR_dedicated_allocation path when enabled).
4081 void GetBufferMemoryRequirements(
4083 VkMemoryRequirements& memReq,
4084 bool& requiresDedicatedAllocation,
4085 bool& prefersDedicatedAllocation)
const;
4086 void GetImageMemoryRequirements(
4088 VkMemoryRequirements& memReq,
4089 bool& requiresDedicatedAllocation,
4090 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory*/vmaCreateBuffer etc.
4093 VkResult AllocateMemory(
4094 const VkMemoryRequirements& vkMemReq,
4095 bool requiresDedicatedAllocation,
4096 bool prefersDedicatedAllocation,
4097 VkBuffer dedicatedBuffer,
4098 VkImage dedicatedImage,
4100 VmaSuballocationType suballocType,
4101 VmaAllocation* pAllocation);
4104 void FreeMemory(
const VmaAllocation allocation);
4106 void CalculateStats(
VmaStats* pStats);
4108 #if VMA_STATS_STRING_ENABLED 4109 void PrintDetailedMap(
class VmaJsonWriter& json);
4112 VkResult Defragment(
4113 VmaAllocation* pAllocations,
4114 size_t allocationCount,
4115 VkBool32* pAllocationsChanged,
4119 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4120 bool TouchAllocation(VmaAllocation hAllocation);
4123 void DestroyPool(VmaPool pool);
4124 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4126 void SetCurrentFrameIndex(uint32_t frameIndex);
4128 void MakePoolAllocationsLost(
4130 size_t* pLostAllocationCount);
4132 void CreateLostAllocation(VmaAllocation* pAllocation);
// Thin wrappers over vkAllocateMemory/vkFreeMemory honoring heap limits.
4134 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4135 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4137 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4138 void Unmap(VmaAllocation hAllocation);
4141 VkDeviceSize m_PreferredLargeHeapBlockSize;
4143 VkPhysicalDevice m_PhysicalDevice;
4144 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
4146 VMA_MUTEX m_PoolsMutex;
4148 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4154 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation within one concrete memory type (block or dedicated path).
4156 VkResult AllocateMemoryOfType(
4157 const VkMemoryRequirements& vkMemReq,
4158 bool dedicatedAllocation,
4159 VkBuffer dedicatedBuffer,
4160 VkImage dedicatedImage,
4162 uint32_t memTypeIndex,
4163 VmaSuballocationType suballocType,
4164 VmaAllocation* pAllocation);
4167 VkResult AllocateDedicatedMemory(
4169 VmaSuballocationType suballocType,
4170 uint32_t memTypeIndex,
4172 bool isUserDataString,
4174 VkBuffer dedicatedBuffer,
4175 VkImage dedicatedImage,
4176 VmaAllocation* pAllocation);
// Frees memory and unregisters it from m_pDedicatedAllocations.
4179 void FreeDedicatedMemory(VmaAllocation allocation);
// CPU-side allocation helpers routing through the allocator's
// VkAllocationCallbacks (overloads of the lower-level VmaMalloc/VmaFree that
// take the callbacks directly).
// NOTE(review): braces and some lines dropped by this extract (e.g. the
// placement-new/destructor call inside vma_delete*); verify upstream.
4185 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4187 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4190 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4192 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (uninitialized storage of sizeof(T)).
4195 template<
typename T>
4196 static T* VmaAllocate(VmaAllocator hAllocator)
4198 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation.
4201 template<
typename T>
4202 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4204 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroy + free a single object (destructor call line dropped by extract).
4207 template<
typename T>
4208 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4213 VmaFree(hAllocator, ptr);
// Destroy + free an array; iterates destructors in reverse order.
4217 template<
typename T>
4218 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4222 for(
size_t i = count; i--; )
4224 VmaFree(hAllocator, ptr);
// VmaStringBuilder: append-only character buffer used to build the JSON
// stats string (only compiled when VMA_STATS_STRING_ENABLED).
4231 #if VMA_STATS_STRING_ENABLED 4233 class VmaStringBuilder
4236 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4237 size_t GetLength()
const {
return m_Data.size(); }
// NOTE: buffer is not NUL-terminated here — GetData() returns raw bytes of
// GetLength() size.
4238 const char* GetData()
const {
return m_Data.data(); }
4240 void Add(
char ch) { m_Data.push_back(ch); }
4241 void Add(
const char* pStr);
4242 void AddNewLine() { Add(
'\n'); }
4243 void AddNumber(uint32_t num);
4244 void AddNumber(uint64_t num);
4245 void AddPointer(
const void* ptr);
4248 VmaVector< char, VmaStlAllocator<char> > m_Data;
// VmaStringBuilder out-of-line methods. NOTE(review): this extract dropped
// some lines — e.g. the strLen>0 guard in Add() and the Add(buf) calls after
// the conversions in AddNumber/AddPointer (they format into a local 'buf'
// whose declaration is also missing); verify upstream.
4251 void VmaStringBuilder::Add(
const char* pStr)
// Append raw bytes of the C string (no terminating NUL stored).
4253 const size_t strLen = strlen(pStr);
4256 const size_t oldCount = m_Data.size();
4257 m_Data.resize(oldCount + strLen);
4258 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Decimal formatting via project helpers.
4262 void VmaStringBuilder::AddNumber(uint32_t num)
4265 VmaUint32ToStr(buf,
sizeof(buf), num);
4269 void VmaStringBuilder::AddNumber(uint64_t num)
4272 VmaUint64ToStr(buf,
sizeof(buf), num);
4276 void VmaStringBuilder::AddPointer(
const void* ptr)
4279 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: tiny streaming JSON emitter writing into a VmaStringBuilder.
// Tracks a stack of open objects/arrays so commas, colons, and indentation
// are inserted automatically; objects alternate string keys and values
// (enforced by the valueCount parity assert in BeginValue).
4283 #endif // #if VMA_STATS_STRING_ENABLED 4288 #if VMA_STATS_STRING_ENABLED 4293 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// singleLine suppresses newlines/indent inside the collection.
4296 void BeginObject(
bool singleLine =
false);
4299 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString; the Continue* overloads append to
// an open string without closing the quote.
4302 void WriteString(
const char* pStr);
4303 void BeginString(
const char* pStr = VMA_NULL);
4304 void ContinueString(
const char* pStr);
4305 void ContinueString(uint32_t n);
4306 void ContinueString(uint64_t n);
4307 void ContinueString_Pointer(
const void* ptr);
4308 void EndString(
const char* pStr = VMA_NULL);
4310 void WriteNumber(uint32_t n);
4311 void WriteNumber(uint64_t n);
4312 void WriteBool(
bool b);
// One indentation step (defined right below as two spaces).
4316 static const char*
const INDENT;
4318 enum COLLECTION_TYPE
4320 COLLECTION_TYPE_OBJECT,
4321 COLLECTION_TYPE_ARRAY,
// One stack entry per currently-open collection.
4325 COLLECTION_TYPE type;
4326 uint32_t valueCount;
4327 bool singleLineMode;
4330 VmaStringBuilder& m_SB;
4331 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
// True between BeginString and EndString.
4332 bool m_InsideString;
4334 void BeginValue(
bool isString);
4335 void WriteIndent(
bool oneLess =
false);
4338 const char*
const VmaJsonWriter::INDENT =
" ";
// VmaJsonWriter out-of-line definitions. NOTE(review): this extract dropped
// many lines — the m_SB initializer, the literal '{'/'}'/'['/']' emissions,
// the per-character escaping switch in ContinueString, and the comma/colon
// emissions in BeginValue; verify upstream before editing.
4340 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4342 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4343 m_InsideString(false)
// Destructor asserts all strings and collections were closed.
4347 VmaJsonWriter::~VmaJsonWriter()
4349 VMA_ASSERT(!m_InsideString);
4350 VMA_ASSERT(m_Stack.empty());
// Open '{' and push a stack entry tracking key/value alternation.
4353 void VmaJsonWriter::BeginObject(
bool singleLine)
4355 VMA_ASSERT(!m_InsideString);
4361 item.type = COLLECTION_TYPE_OBJECT;
4362 item.valueCount = 0;
4363 item.singleLineMode = singleLine;
4364 m_Stack.push_back(item);
// Close '}' — asserts the innermost open collection really is an object.
4367 void VmaJsonWriter::EndObject()
4369 VMA_ASSERT(!m_InsideString);
4374 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Open '[' / close ']' — same pattern for arrays.
4378 void VmaJsonWriter::BeginArray(
bool singleLine)
4380 VMA_ASSERT(!m_InsideString);
4386 item.type = COLLECTION_TYPE_ARRAY;
4387 item.valueCount = 0;
4388 item.singleLineMode = singleLine;
4389 m_Stack.push_back(item);
4392 void VmaJsonWriter::EndArray()
4394 VMA_ASSERT(!m_InsideString);
4399 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: complete quoted string in one call.
4403 void VmaJsonWriter::WriteString(
const char* pStr)
// Opens the quote and optionally writes initial content.
4409 void VmaJsonWriter::BeginString(
const char* pStr)
4411 VMA_ASSERT(!m_InsideString);
4415 m_InsideString =
true;
4416 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4418 ContinueString(pStr);
// Appends to an open string, escaping characters as needed (the escaping
// switch body is not visible in this extract).
4422 void VmaJsonWriter::ContinueString(
const char* pStr)
4424 VMA_ASSERT(m_InsideString);
4426 const size_t strLen = strlen(pStr);
4427 for(
size_t i = 0; i < strLen; ++i)
4460 VMA_ASSERT(0 &&
"Character not currently supported.");
4466 void VmaJsonWriter::ContinueString(uint32_t n)
4468 VMA_ASSERT(m_InsideString);
4472 void VmaJsonWriter::ContinueString(uint64_t n)
4474 VMA_ASSERT(m_InsideString);
4478 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4480 VMA_ASSERT(m_InsideString);
4481 m_SB.AddPointer(ptr);
// Optionally appends final content, then closes the quote.
4484 void VmaJsonWriter::EndString(
const char* pStr)
4486 VMA_ASSERT(m_InsideString);
4487 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4489 ContinueString(pStr);
4492 m_InsideString =
false;
4495 void VmaJsonWriter::WriteNumber(uint32_t n)
4497 VMA_ASSERT(!m_InsideString);
4502 void VmaJsonWriter::WriteNumber(uint64_t n)
4504 VMA_ASSERT(!m_InsideString);
4509 void VmaJsonWriter::WriteBool(
bool b)
4511 VMA_ASSERT(!m_InsideString);
4513 m_SB.Add(b ?
"true" :
"false");
4516 void VmaJsonWriter::WriteNull()
4518 VMA_ASSERT(!m_InsideString);
// Emits separators before a value: asserts keys are strings (even
// valueCount inside an object), writes ':' after a key and ',' between
// elements (emission lines not visible here), then counts the value.
4523 void VmaJsonWriter::BeginValue(
bool isString)
4525 if(!m_Stack.empty())
4527 StackItem& currItem = m_Stack.back();
4528 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4529 currItem.valueCount % 2 == 0)
4531 VMA_ASSERT(isString);
4534 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4535 currItem.valueCount % 2 != 0)
4539 else if(currItem.valueCount > 0)
4548 ++currItem.valueCount;
// Newline + one INDENT per open collection (minus one when closing).
4552 void VmaJsonWriter::WriteIndent(
bool oneLess)
4554 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4558 size_t count = m_Stack.size();
4559 if(count > 0 && oneLess)
4563 for(
size_t i = 0; i < count; ++i)
// VmaAllocation_T::SetUserData: in string mode (FLAG_USER_DATA_STRING) the
// allocation owns a heap copy of the string — free the old copy, then
// duplicate the new one; otherwise just store the raw pointer.
4570 #endif // #if VMA_STATS_STRING_ENABLED 4574 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4576 if(IsUserDataString())
// Setting the same owned pointer again would read freed memory below.
4578 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData)
4580 FreeUserDataString(hAllocator);
4582 if(pUserData != VMA_NULL)
// Duplicate the incoming NUL-terminated string (+1 for the terminator).
4584 const char*
const newStrSrc = (
char*)pUserData;
4585 const size_t newStrLen = strlen(newStrSrc);
4586 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4587 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4588 m_pUserData = newStrDst;
// Non-string mode: the pointer is opaque and caller-owned.
4593 m_pUserData = pUserData;
// VmaAllocation_T::ChangeBlockAllocation: retarget this block allocation to a
// different block/offset (defragmentation). If the allocation holds map
// references, they are transferred: unmap old block, map new block with the
// same reference count.
4597 void VmaAllocation_T::ChangeBlockAllocation(
4598 VmaAllocator hAllocator,
4599 VmaDeviceMemoryBlock* block,
4600 VkDeviceSize offset)
4602 VMA_ASSERT(block != VMA_NULL);
4603 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4606 if(block != m_BlockAllocation.m_Block)
// Low 7 bits only — the persistent-map flag stays with this allocation.
4608 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4609 if(IsPersistentMap())
4611 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4612 block->Map(hAllocator, mapRefCount, VMA_NULL);
4615 m_BlockAllocation.m_Block = block;
4616 m_BlockAllocation.m_Offset = offset;
// VmaAllocation_T type-dispatching accessors: each switches on m_Type and
// reads from the matching BlockAllocation / DedicatedAllocation state.
// NOTE(review): this extract dropped the switch headers, some case bodies
// and default branches; verify upstream before editing.
4619 VkDeviceSize VmaAllocation_T::GetOffset()
const 4623 case ALLOCATION_TYPE_BLOCK:
4624 return m_BlockAllocation.m_Offset;
// Dedicated allocations start at offset 0 of their own VkDeviceMemory
// (return line not visible in this extract).
4625 case ALLOCATION_TYPE_DEDICATED:
4633 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4637 case ALLOCATION_TYPE_BLOCK:
4638 return m_BlockAllocation.m_Block->m_hMemory;
4639 case ALLOCATION_TYPE_DEDICATED:
4640 return m_DedicatedAllocation.m_hMemory;
4643 return VK_NULL_HANDLE;
4647 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4651 case ALLOCATION_TYPE_BLOCK:
4652 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4653 case ALLOCATION_TYPE_DEDICATED:
4654 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Mapped pointer: for block allocations it is the block's shared mapping
// plus this allocation's offset.
4661 void* VmaAllocation_T::GetMappedData()
const 4665 case ALLOCATION_TYPE_BLOCK:
4668 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4669 VMA_ASSERT(pBlockData != VMA_NULL);
4670 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4677 case ALLOCATION_TYPE_DEDICATED:
// Invariant: mapped pointer present iff any map reference is held.
4678 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4679 return m_DedicatedAllocation.m_pMappedData;
4686 bool VmaAllocation_T::CanBecomeLost()
const 4690 case ALLOCATION_TYPE_BLOCK:
4691 return m_BlockAllocation.m_CanBecomeLost;
// Dedicated allocations can never become lost (return line not visible).
4692 case ALLOCATION_TYPE_DEDICATED:
// Pool handle only exists for block allocations (asserted).
4700 VmaPool VmaAllocation_T::GetPool()
const 4702 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4703 return m_BlockAllocation.m_hPool;
// VmaAllocation_T::MakeLost: CAS loop attempting to flip the atomic last-use
// frame index to VMA_FRAME_INDEX_LOST. Fails when the allocation is already
// lost or was used within the last frameInUseCount frames.
// NOTE(review): loop header and the return statements inside the branches
// were dropped by this extract; verify upstream.
4706 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4708 VMA_ASSERT(CanBecomeLost());
4714 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
// Already lost — nothing to do (presumably returns false here).
4717 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still potentially in use by the GPU — cannot be sacrificed.
4722 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
// compare_exchange_weak may fail spuriously — hence the enclosing loop.
4728 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// FreeUserDataString: release the heap-owned copy made by SetUserData in
// string mode (the stored string is NUL-terminated, hence the +1).
4738 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4740 VMA_ASSERT(IsUserDataString());
4741 if(m_pUserData != VMA_NULL)
4743 char*
const oldStr = (
char*)m_pUserData;
4744 const size_t oldStrLen = strlen(oldStr);
4745 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4746 m_pUserData = VMA_NULL;
// Map/unmap reference counting. The low 7 bits of m_MapCount are the user
// reference count (max 0x7F); the high bit marks persistent mapping and is
// never counted. NOTE(review): the ++m_MapCount / --m_MapCount mutation
// lines inside the if-branches were dropped by this extract; verify upstream.
4750 void VmaAllocation_T::BlockAllocMap()
4752 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4754 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4760 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4764 void VmaAllocation_T::BlockAllocUnmap()
4766 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4768 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4774 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Dedicated variant: first map calls vkMapMemory; later maps just return the
// cached pointer.
4778 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4780 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
// Already mapped (user ref or persistent flag): reuse cached pointer.
4784 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4786 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4787 *ppData = m_DedicatedAllocation.m_pMappedData;
4793 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4794 return VK_ERROR_MEMORY_MAP_FAILED;
// First mapping: call through the function-pointer table.
4799 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4800 hAllocator->m_hDevice,
4801 m_DedicatedAllocation.m_hMemory,
4806 if(result == VK_SUCCESS)
4808 m_DedicatedAllocation.m_pMappedData = *ppData;
// Dedicated unmap: when the last user reference is released (and the
// allocation is not persistently mapped) vkUnmapMemory is called.
4815 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4817 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4819 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4824 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4825 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4826 hAllocator->m_hDevice,
4827 m_DedicatedAllocation.m_hMemory);
4832 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// VmaPrintStatInfo: serialize one VmaStatInfo as a JSON object — counters,
// byte totals, and min/avg/max sub-objects for allocation and unused-range
// sizes. NOTE(review): the array initializer contents and the
// json.WriteNumber(stat.*) value lines were dropped by this extract; verify
// upstream.
4836 #if VMA_STATS_STRING_ENABLED 4839 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4848 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
// Scalar counters.
4852 json.WriteString(
"Blocks");
4855 json.WriteString(
"Allocations");
4858 json.WriteString(
"UnusedRanges");
4861 json.WriteString(
"UsedBytes");
4864 json.WriteString(
"UnusedBytes");
// Allocation size distribution as a single-line object.
4869 json.WriteString(
"AllocationSize");
4870 json.BeginObject(
true);
4871 json.WriteString(
"Min");
4873 json.WriteString(
"Avg");
4875 json.WriteString(
"Max");
// Unused (free) range size distribution.
4882 json.WriteString(
"UnusedRangeSize");
4883 json.BeginObject(
true);
4884 json.WriteString(
"Min");
4886 json.WriteString(
"Avg");
4888 json.WriteString(
"Max");
// VmaSuballocationItemSizeLess: orders suballocation-list iterators by the
// size of the suballocation they point to; the second overload compares
// against a raw size for binary search in m_FreeSuballocationsBySize.
4896 #endif // #if VMA_STATS_STRING_ENABLED 4898 struct VmaSuballocationItemSizeLess
4901 const VmaSuballocationList::iterator lhs,
4902 const VmaSuballocationList::iterator rhs)
const 4904 return lhs->size < rhs->size;
4907 const VmaSuballocationList::iterator lhs,
4908 VkDeviceSize rhsSize)
const 4910 return lhs->size < rhsSize;
// VmaBlockMetadata construction and initialization. Init() establishes the
// canonical empty state: one FREE suballocation spanning the whole block,
// registered in the by-size index.
4917 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4921 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4922 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4926 VmaBlockMetadata::~VmaBlockMetadata()
4930 void VmaBlockMetadata::Init(VkDeviceSize size)
// Whole block starts free (m_Size/m_FreeCount assignments not visible in
// this extract, but implied by the invariants checked in Validate()).
4934 m_SumFreeSize = size;
4936 VmaSuballocation suballoc = {};
4937 suballoc.offset = 0;
4938 suballoc.size = size;
4939 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4940 suballoc.hAllocation = VK_NULL_HANDLE;
4942 m_Suballocations.push_back(suballoc);
// NOTE(review): upstream decrements this end() iterator (--suballocItem,
// original ~4944) before registering it; that line is missing here.
4943 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4945 m_FreeSuballocationsBySize.push_back(suballocItem);
// VmaBlockMetadata::Validate: full consistency check. Walks the
// suballocation list verifying contiguous offsets, no two adjacent free
// ranges, hAllocation null-ness matching the FREE type, and per-allocation
// offset/size agreement; then checks m_FreeSuballocationsBySize contains
// exactly the registrable free ranges in ascending size order; finally
// cross-checks the aggregate counters. Returns false on first violation.
// NOTE(review): the 'return false' lines inside the if-branches and the
// final 'return true' were dropped by this extract; verify upstream.
4948 bool VmaBlockMetadata::Validate()
const 4950 if(m_Suballocations.empty())
// Running expectations while walking the list.
4956 VkDeviceSize calculatedOffset = 0;
4958 uint32_t calculatedFreeCount = 0;
4960 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges big enough to be in m_FreeSuballocationsBySize.
4963 size_t freeSuballocationsToRegister = 0;
// Two adjacent free ranges should have been merged — track previous type.
4965 bool prevFree =
false;
4967 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4968 suballocItem != m_Suballocations.cend();
4971 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block contiguously.
4974 if(subAlloc.offset != calculatedOffset)
4979 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4981 if(prevFree && currFree)
// Free <=> no allocation handle.
4986 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4993 calculatedSumFreeSize += subAlloc.size;
4994 ++calculatedFreeCount;
4995 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4997 ++freeSuballocationsToRegister;
// Used range: the allocation object must agree with the list entry.
5002 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5006 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5012 calculatedOffset += subAlloc.size;
5013 prevFree = currFree;
// By-size index: correct count, only FREE entries, ascending sizes.
5018 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5023 VkDeviceSize lastSize = 0;
5024 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5026 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5029 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5034 if(suballocItem->size < lastSize)
5039 lastSize = suballocItem->size;
// Aggregate counters must match what the walk computed.
5043 if(!ValidateFreeSuballocationList() ||
5044 (calculatedOffset != m_Size) ||
5045 (calculatedSumFreeSize != m_SumFreeSize) ||
5046 (calculatedFreeCount != m_FreeCount))
// Returns the size of the largest free range in this block.
// m_FreeSuballocationsBySize is sorted ascending by size, so the last element
// is the largest registered free suballocation.
5054 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5056 if(!m_FreeSuballocationsBySize.empty())
5058 return m_FreeSuballocationsBySize.back()->size;
// True when the whole block is a single free suballocation, i.e. nothing is
// allocated from it.
5066 bool VmaBlockMetadata::IsEmpty()
const 5068 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills outInfo with statistics for this block: iterates all suballocations,
// classifying each as used (non-FREE type) or unused.
// NOTE(review): the accumulation statements inside the loop are elided in this
// extract; only the used/free branch condition is visible.
5071 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5075 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5087 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5088 suballocItem != m_Suballocations.cend();
5091 const VmaSuballocation& suballoc = *suballocItem;
5092 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's contribution into inoutStats (pool-level stats).
// Adds the whole block size; further accumulations are elided in this extract.
5105 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5107 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5109 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON (statistics-string support):
// totals first, then one object per suballocation with type/size/offset and,
// for used suballocations, optional user data.
5116 #if VMA_STATS_STRING_ENABLED 5118 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5122 json.WriteString(
"TotalBytes");
5123 json.WriteNumber(m_Size);
5125 json.WriteString(
"UnusedBytes");
5126 json.WriteNumber(m_SumFreeSize);
// Used allocation count = total entries minus free entries.
5128 json.WriteString(
"Allocations");
5129 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5131 json.WriteString(
"UnusedRanges");
5132 json.WriteNumber(m_FreeCount);
5134 json.WriteString(
"Suballocations");
5137 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5138 suballocItem != m_Suballocations.cend();
5139 ++suballocItem, ++i)
5141 json.BeginObject(
true);
5143 json.WriteString(
"Type");
5144 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5146 json.WriteString(
"Size");
5147 json.WriteNumber(suballocItem->size);
5149 json.WriteString(
"Offset");
5150 json.WriteNumber(suballocItem->offset);
// User data only exists on used suballocations.
5152 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5154 const void* pUserData = suballocItem->hAllocation->GetUserData();
5155 if(pUserData != VMA_NULL)
5157 json.WriteString(
"UserData");
// Either a user-owned string, or an opaque pointer printed as a number.
5158 if(suballocItem->hAllocation->IsUserDataString())
5160 json.WriteString((
const char*)pUserData);
5165 json.ContinueString_Pointer(pUserData);
// Builds a trivial allocation request for a brand-new (empty) block: the
// request points at the single free suballocation covering the whole block,
// at offset 0, with nothing to make lost.
5178 #endif // #if VMA_STATS_STRING_ENABLED 5190 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5192 VMA_ASSERT(IsEmpty());
5193 pAllocationRequest->offset = 0;
5194 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5195 pAllocationRequest->sumItemSize = 0;
5196 pAllocationRequest->item = m_Suballocations.begin();
5197 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for a new allocation of given size/alignment/type in
// this block. On success fills *pAllocationRequest and (presumably) returns
// true. Strategy:
//   1. Search free suballocations only (binary search by size, then best-fit
//      forward or worst-fit backward scan - selection condition elided here).
//   2. If canMakeOtherLost, additionally scan ALL suballocations, considering
//      evicting ("making lost") existing allocations, and keep the request
//      with the lowest CalcCost().
5200 bool VmaBlockMetadata::CreateAllocationRequest(
5201 uint32_t currentFrameIndex,
5202 uint32_t frameInUseCount,
5203 VkDeviceSize bufferImageGranularity,
5204 VkDeviceSize allocSize,
5205 VkDeviceSize allocAlignment,
5206 VmaSuballocationType allocType,
5207 bool canMakeOtherLost,
5208 VmaAllocationRequest* pAllocationRequest)
5210 VMA_ASSERT(allocSize > 0);
5211 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5212 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5213 VMA_HEAVY_ASSERT(Validate());
// Quick reject: without eviction, total free space must at least fit allocSize.
5216 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5222 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5223 if(freeSuballocCount > 0)
// Binary search for the first free suballocation not smaller than allocSize.
5228 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5229 m_FreeSuballocationsBySize.data(),
5230 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5232 VmaSuballocationItemSizeLess());
5233 size_t index = it - m_FreeSuballocationsBySize.data();
// Forward scan from the first large-enough candidate (best-fit direction).
5234 for(; index < freeSuballocCount; ++index)
5239 bufferImageGranularity,
5243 m_FreeSuballocationsBySize[index],
5245 &pAllocationRequest->offset,
5246 &pAllocationRequest->itemsToMakeLostCount,
5247 &pAllocationRequest->sumFreeSize,
5248 &pAllocationRequest->sumItemSize))
5250 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative: backward scan from largest to smallest (worst-fit direction).
5258 for(
size_t index = freeSuballocCount; index--; )
5263 bufferImageGranularity,
5267 m_FreeSuballocationsBySize[index],
5269 &pAllocationRequest->offset,
5270 &pAllocationRequest->itemsToMakeLostCount,
5271 &pAllocationRequest->sumFreeSize,
5272 &pAllocationRequest->sumItemSize))
5274 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: consider every suballocation that is free or can become lost.
5281 if(canMakeOtherLost)
// Start from "worst possible" cost so any feasible request wins.
5285 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5286 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5288 VmaAllocationRequest tmpAllocRequest = {};
5289 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5290 suballocIt != m_Suballocations.end();
5293 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5294 suballocIt->hAllocation->CanBecomeLost())
5299 bufferImageGranularity,
5305 &tmpAllocRequest.offset,
5306 &tmpAllocRequest.itemsToMakeLostCount,
5307 &tmpAllocRequest.sumFreeSize,
5308 &tmpAllocRequest.sumItemSize))
5310 tmpAllocRequest.item = suballocIt;
// Keep the cheapest request (least bytes of existing allocations sacrificed).
5312 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5314 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate was recorded (sumItemSize left its sentinel value).
5320 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Actually evicts the allocations that CreateAllocationRequest planned to make
// lost. Walks forward from pAllocationRequest->item, skipping free entries and
// calling MakeLost() on each evictable allocation until itemsToMakeLostCount
// reaches zero. Freed entries are merged via FreeSuballocation, which may move
// the iterator.
5329 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5330 uint32_t currentFrameIndex,
5331 uint32_t frameInUseCount,
5332 VmaAllocationRequest* pAllocationRequest)
5334 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip already-free entries; only used ones need to be made lost.
5336 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5338 ++pAllocationRequest->item;
5340 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5341 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5342 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5343 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation frees+merges and returns the (possibly merged) iterator.
5345 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5346 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: request item is now a valid free suballocation.
5354 VMA_HEAVY_ASSERT(Validate());
5355 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5356 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost given the
// current frame index and frame-in-use window. Returns how many were evicted.
5361 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5363 uint32_t lostAllocationCount = 0;
5364 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5365 it != m_Suballocations.end();
5368 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5369 it->hAllocation->CanBecomeLost() &&
5370 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned iterator.
5372 it = FreeSuballocation(it);
5373 ++lostAllocationCount;
5376 return lostAllocationCount;
// Commits a previously computed allocation request: carves allocSize bytes of
// the given type out of the free suballocation request.item points to.
// If alignment/margins left padding before or after the allocation, the
// padding is re-inserted as new free suballocations. Updates m_FreeCount and
// m_SumFreeSize accordingly.
5379 void VmaBlockMetadata::Alloc(
5380 const VmaAllocationRequest& request,
5381 VmaSuballocationType type,
5382 VkDeviceSize allocSize,
5383 VmaAllocation hAllocation)
5385 VMA_ASSERT(request.item != m_Suballocations.end());
5386 VmaSuballocation& suballoc = *request.item;
5388 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5390 VMA_ASSERT(request.offset >= suballoc.offset);
5391 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5392 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5393 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the free-by-size vector before mutating, since size changes.
5397 UnregisterFreeSuballocation(request.item);
// Repurpose the free suballocation as the new used allocation.
5399 suballoc.offset = request.offset;
5400 suballoc.size = allocSize;
5401 suballoc.type = type;
5402 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation after the allocation.
5407 VmaSuballocation paddingSuballoc = {};
5408 paddingSuballoc.offset = request.offset + allocSize;
5409 paddingSuballoc.size = paddingEnd;
5410 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5411 VmaSuballocationList::iterator next = request.item;
5413 const VmaSuballocationList::iterator paddingEndItem =
5414 m_Suballocations.insert(next, paddingSuballoc);
5415 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation before the allocation.
5421 VmaSuballocation paddingSuballoc = {};
5422 paddingSuballoc.offset = request.offset - paddingBegin;
5423 paddingSuballoc.size = paddingBegin;
5424 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5425 const VmaSuballocationList::iterator paddingBeginItem =
5426 m_Suballocations.insert(request.item, paddingSuballoc);
5427 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; paddings (if any) add back to the count (elided).
5431 m_FreeCount = m_FreeCount - 1;
5432 if(paddingBegin > 0)
5440 m_SumFreeSize -= allocSize;
// Frees the suballocation holding the given allocation handle. Linear search
// over the suballocation list; asserts if the handle is not found.
5443 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5445 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5446 suballocItem != m_Suballocations.end();
5449 VmaSuballocation& suballoc = *suballocItem;
5450 if(suballoc.hAllocation == allocation)
5452 FreeSuballocation(suballocItem);
5453 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation does not belong to this block.
5457 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts at the given offset. Linear search;
// asserts if no suballocation has that offset.
5460 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5462 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5463 suballocItem != m_Suballocations.end();
5466 VmaSuballocation& suballoc = *suballocItem;
5467 if(suballoc.offset == offset)
5469 FreeSuballocation(suballocItem);
5473 VMA_ASSERT(0 &&
"Not found!");
// Validates invariants of m_FreeSuballocationsBySize: every entry is FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// is sorted ascending by size. (Failure-return lines elided in this extract.)
5476 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5478 VkDeviceSize lastSize = 0;
5479 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5481 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5483 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5488 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5493 if(it->size < lastSize)
5499 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at the suballocation pointed to by suballocItem.
// Outputs: *pOffset (final aligned offset), *itemsToMakeLostCount (how many
// existing allocations would need to be evicted), *pSumFreeSize and
// *pSumItemSize (used for cost comparison by the caller).
// Two major paths: canMakeOtherLost == true (may span multiple suballocations,
// evicting ones that can become lost) and the simpler free-suballocation-only
// path. Both honor VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT and Vulkan's
// bufferImageGranularity rules against neighboring suballocations.
5504 bool VmaBlockMetadata::CheckAllocation(
5505 uint32_t currentFrameIndex,
5506 uint32_t frameInUseCount,
5507 VkDeviceSize bufferImageGranularity,
5508 VkDeviceSize allocSize,
5509 VkDeviceSize allocAlignment,
5510 VmaSuballocationType allocType,
5511 VmaSuballocationList::const_iterator suballocItem,
5512 bool canMakeOtherLost,
5513 VkDeviceSize* pOffset,
5514 size_t* itemsToMakeLostCount,
5515 VkDeviceSize* pSumFreeSize,
5516 VkDeviceSize* pSumItemSize)
const 5518 VMA_ASSERT(allocSize > 0);
5519 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5520 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5521 VMA_ASSERT(pOffset != VMA_NULL);
5523 *itemsToMakeLostCount = 0;
// ---- Path 1: eviction allowed ----
5527 if(canMakeOtherLost)
5529 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5531 *pSumFreeSize = suballocItem->size;
// Starting on a used suballocation: it must itself be evictable and stale.
5535 if(suballocItem->hAllocation->CanBecomeLost() &&
5536 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5538 ++*itemsToMakeLostCount;
5539 *pSumItemSize = suballocItem->size;
// Remaining size of the whole block from this offset must fit the allocation.
5548 if(m_Size - suballocItem->offset < allocSize)
// Start at the suballocation's offset, then apply debug margin and alignment.
5554 *pOffset = suballocItem->offset;
5557 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5559 *pOffset += VMA_DEBUG_MARGIN;
5563 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5564 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a previous suballocation of conflicting type
// (buffer vs. image) shares the same "page", bump offset to the next page.
5568 if(bufferImageGranularity > 1)
5570 bool bufferImageGranularityConflict =
false;
5571 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5572 while(prevSuballocItem != m_Suballocations.cbegin())
5575 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5576 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5578 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5580 bufferImageGranularityConflict =
true;
5588 if(bufferImageGranularityConflict)
5590 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed the offset past this suballocation entirely.
5596 if(*pOffset >= suballocItem->offset + suballocItem->size)
5602 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// End margin is required unless the allocation reaches the end of the block.
5605 VmaSuballocationList::const_iterator next = suballocItem;
5607 const VkDeviceSize requiredEndMargin =
5608 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5610 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5612 if(suballocItem->offset + totalSize > m_Size)
// The allocation may span several suballocations; walk forward consuming them,
// counting free bytes and evictable allocations until totalSize is covered.
5619 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5620 if(totalSize > suballocItem->size)
5622 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5623 while(remainingSize > 0)
5626 if(lastSuballocItem == m_Suballocations.cend())
5630 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5632 *pSumFreeSize += lastSuballocItem->size;
5636 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5637 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5638 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5640 ++*itemsToMakeLostCount;
5641 *pSumItemSize += lastSuballocItem->size;
5648 remainingSize = (lastSuballocItem->size < remainingSize) ?
5649 5649 remainingSize - lastSuballocItem->size : 0;
// Check following suballocations for granularity conflicts on the same page;
// conflicting neighbors must themselves be evictable.
5655 if(bufferImageGranularity > 1)
5657 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5659 while(nextSuballocItem != m_Suballocations.cend())
5661 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5662 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5664 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5666 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5667 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5668 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5670 ++*itemsToMakeLostCount;
// ---- Path 2: no eviction; suballocItem must be a single free range ----
5689 const VmaSuballocation& suballoc = *suballocItem;
5690 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5692 *pSumFreeSize = suballoc.size;
5695 if(suballoc.size < allocSize)
5701 *pOffset = suballoc.offset;
// Same margin + alignment + granularity adjustment as in path 1.
5704 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5706 *pOffset += VMA_DEBUG_MARGIN;
5710 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5711 *pOffset = VmaAlignUp(*pOffset, alignment);
5715 if(bufferImageGranularity > 1)
5717 bool bufferImageGranularityConflict =
false;
5718 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5719 while(prevSuballocItem != m_Suballocations.cbegin())
5722 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5723 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5725 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5727 bufferImageGranularityConflict =
true;
5735 if(bufferImageGranularityConflict)
5737 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5742 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5745 VmaSuballocationList::const_iterator next = suballocItem;
5747 const VkDeviceSize requiredEndMargin =
5748 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Must fit fully inside this one free suballocation.
5751 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Any granularity conflict with a following suballocation is fatal here,
// since nothing can be evicted on this path.
5758 if(bufferImageGranularity > 1)
5760 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5762 while(nextSuballocItem != m_Suballocations.cend())
5764 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5765 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5767 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a free suballocation with its (also free) successor: sizes are
// summed into item and the successor is erased from the list.
// Precondition: both item and the next entry are FREE.
5786 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5788 VMA_ASSERT(item != m_Suballocations.end());
5789 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5791 VmaSuballocationList::iterator nextItem = item;
5793 VMA_ASSERT(nextItem != m_Suballocations.end());
5794 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5796 item->size += nextItem->size;
5798 m_Suballocations.erase(nextItem);
// Converts a used suballocation to FREE, updates the cached free totals, and
// coalesces it with free neighbors on either side. Returns the iterator of
// the resulting (possibly merged) free suballocation, which is re-registered
// in the by-size vector.
5801 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5804 VmaSuballocation& suballoc = *suballocItem;
5805 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5806 suballoc.hAllocation = VK_NULL_HANDLE;
5810 m_SumFreeSize += suballoc.size;
// Determine whether the neighbors are free and should be merged in.
5813 bool mergeWithNext =
false;
5814 bool mergeWithPrev =
false;
5816 VmaSuballocationList::iterator nextItem = suballocItem;
5818 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5820 mergeWithNext =
true;
5823 VmaSuballocationList::iterator prevItem = suballocItem;
5824 if(suballocItem != m_Suballocations.begin())
5827 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5829 mergeWithPrev =
true;
// Merged neighbors must leave the by-size vector before their size changes.
5835 UnregisterFreeSuballocation(nextItem);
5836 MergeFreeWithNext(suballocItem);
// Merging with the previous entry: prev absorbs this one and is re-registered.
5841 UnregisterFreeSuballocation(prevItem);
5842 MergeFreeWithNext(prevItem);
5843 RegisterFreeSuballocation(prevItem);
5848 RegisterFreeSuballocation(suballocItem);
5849 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Suballocations below the registration threshold are
// deliberately not tracked there.
5853 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5855 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5856 VMA_ASSERT(item->size > 0);
5860 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5862 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5864 if(m_FreeSuballocationsBySize.empty())
5866 m_FreeSuballocationsBySize.push_back(item);
// Sorted insert keeps the ascending-by-size invariant.
5870 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Binary-search
// to the first entry of equal size, then linear-scan within that equal-size
// run to find the exact iterator. Asserts if the item is not present.
5878 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5880 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5881 VMA_ASSERT(item->size > 0);
5885 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only registered (large-enough) suballocations are in the vector at all.
5887 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5889 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5890 m_FreeSuballocationsBySize.data(),
5891 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5893 VmaSuballocationItemSizeLess());
5894 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5895 index < m_FreeSuballocationsBySize.size();
5898 if(m_FreeSuballocationsBySize[index] == item)
5900 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Past the equal-size run without a match -> the item was never registered.
5903 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5905 VMA_ASSERT(0 &&
"Not found.");
// Starts unmapped: no host pointer cached (map count init elided in extract).
5914 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5916 m_pMappedData(VMA_NULL)
// Destruction while still mapped indicates a Map/Unmap imbalance in the caller.
5920 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5922 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory block. If already mapped, just
// bumps the count and returns the cached pointer; otherwise calls vkMapMemory
// through the allocator's dispatch table. Thread-safe via m_Mutex when the
// allocator uses mutexes.
5925 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
5932 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse the existing host pointer.
5935 m_MapCount += count;
5936 VMA_ASSERT(m_pMappedData != VMA_NULL);
5937 if(ppData != VMA_NULL)
5939 *ppData = m_pMappedData;
// First map: go through the (possibly user-supplied) Vulkan function pointers.
5945 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5946 hAllocator->m_hDevice,
5952 if(result == VK_SUCCESS)
5954 if(ppData != VMA_NULL)
5956 *ppData = m_pMappedData;
// Reference-counted unmap: decrements the count and only calls vkUnmapMemory
// when it reaches zero. Asserts on unbalanced Unmap.
5964 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
5971 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5972 if(m_MapCount >= count)
5974 m_MapCount -= count;
// Last reference released: drop the host pointer and unmap for real.
5977 m_pMappedData = VMA_NULL;
5978 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5983 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructs an uninitialized block; real setup happens in Init().
5990 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5991 m_MemoryTypeIndex(UINT32_MAX),
5992 m_hMemory(VK_NULL_HANDLE),
5993 m_Metadata(hAllocator)
// Binds this block to freshly allocated VkDeviceMemory and initializes its
// metadata to one big free range of newSize bytes. Must only be called once.
5997 void VmaDeviceMemoryBlock::Init(
5998 uint32_t newMemoryTypeIndex,
5999 VkDeviceMemory newMemory,
6000 VkDeviceSize newSize)
6002 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6004 m_MemoryTypeIndex = newMemoryTypeIndex;
6005 m_hMemory = newMemory;
6007 m_Metadata.Init(newSize);
// Releases the block's VkDeviceMemory back through the allocator. The block
// must be empty - destroying it with live suballocations is a client bug.
6010 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
6014 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6016 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6017 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6018 m_hMemory = VK_NULL_HANDLE;
// Sanity check: block must have live memory and nonzero size, then defer to
// the metadata's full validation.
6021 bool VmaDeviceMemoryBlock::Validate()
const 6023 if((m_hMemory == VK_NULL_HANDLE) ||
6024 (m_Metadata.GetSize() == 0))
6029 return m_Metadata.Validate();
// Thin forwarder to the block's reference-counted mapping helper.
6032 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
6034 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
// Thin forwarder to the block's reference-counted unmapping helper.
6037 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
6039 m_Mapping.Unmap(hAllocator, m_hMemory, count);
// Fragments of static stat helpers. The memset zero-initializes a VmaStatInfo
// out-parameter; VmaPostprocessCalcStatInfo finalizes derived fields (body
// elided in this extract).
6044 memset(&outInfo, 0,
sizeof(outInfo));
6063 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the user's VmaPoolCreateInfo fields into
// the pool's internal block vector (member-init list partially elided).
6071 VmaPool_T::VmaPool_T(
6072 VmaAllocator hAllocator,
6076 createInfo.memoryTypeIndex,
6077 createInfo.blockSize,
6078 createInfo.minBlockCount,
6079 createInfo.maxBlockCount,
6081 createInfo.frameInUseCount,
// Destructor (body elided in this extract).
6086 VmaPool_T::~VmaPool_T()
// VmaBlockVector: sequence of VkDeviceMemory blocks of one memory type,
// serving either a custom pool or the allocator's default pools. The ctor
// only stores configuration; blocks are created lazily.
6090 #if VMA_STATS_STRING_ENABLED 6092 #endif // #if VMA_STATS_STRING_ENABLED 6094 VmaBlockVector::VmaBlockVector(
6095 VmaAllocator hAllocator,
6096 uint32_t memoryTypeIndex,
6097 VkDeviceSize preferredBlockSize,
6098 size_t minBlockCount,
6099 size_t maxBlockCount,
6100 VkDeviceSize bufferImageGranularity,
6101 uint32_t frameInUseCount,
6102 bool isCustomPool) :
6103 m_hAllocator(hAllocator),
6104 m_MemoryTypeIndex(memoryTypeIndex),
6105 m_PreferredBlockSize(preferredBlockSize),
6106 m_MinBlockCount(minBlockCount),
6107 m_MaxBlockCount(maxBlockCount),
6108 m_BufferImageGranularity(bufferImageGranularity),
6109 m_FrameInUseCount(frameInUseCount),
6110 m_IsCustomPool(isCustomPool),
6111 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6112 m_HasEmptyBlock(false),
6113 m_pDefragmentator(VMA_NULL)
// Destroys all remaining blocks (freeing their device memory) in reverse
// order. An active defragmentator at this point is a usage error.
6117 VmaBlockVector::~VmaBlockVector()
6119 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6121 for(
size_t i = m_Blocks.size(); i--; )
6123 m_Blocks[i]->Destroy(m_hAllocator);
6124 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure (error-return lines elided in this extract).
6128 VkResult VmaBlockVector::CreateMinBlocks()
6130 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6132 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6133 if(res != VK_SUCCESS)
// Aggregates pool statistics across all blocks under the vector's mutex.
6141 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6149 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6151 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6153 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6155 VMA_HEAVY_ASSERT(pBlock->Validate());
6156 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retries in the make-other-lost allocation path below.
6160 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector. Strategy, in order:
//   1. Try each existing block without evicting anything.
//   2. If allowed, create a new block (for default pools, progressively
//      halving the preferred size down to what still fits the request).
//   3. If the caller permits making other allocations lost, repeatedly pick
//      the cheapest eviction plan across all blocks and retry up to
//      VMA_ALLOCATION_TRY_COUNT times.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY / VK_ERROR_TOO_MANY_OBJECTS on failure.
6162 VkResult VmaBlockVector::Allocate(
6163 VmaPool hCurrentPool,
6164 uint32_t currentFrameIndex,
6165 const VkMemoryRequirements& vkMemReq,
6167 VmaSuballocationType suballocType,
6168 VmaAllocation* pAllocation)
6173 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- 1. Try existing blocks (no eviction) ---
6177 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6179 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6180 VMA_ASSERT(pCurrBlock);
6181 VmaAllocationRequest currRequest = {};
6182 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6185 m_BufferImageGranularity,
6193 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently mapped allocations keep the block mapped for their lifetime.
6197 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6198 if(res != VK_SUCCESS)
// Block is about to receive an allocation, so it is no longer empty.
6205 if(pCurrBlock->m_Metadata.IsEmpty())
6207 m_HasEmptyBlock =
false;
6210 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6211 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6212 (*pAllocation)->InitBlockAllocation(
6221 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6222 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6223 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 2. Create a new block if the vector may still grow ---
6228 const bool canCreateNewBlock =
6230 (m_Blocks.size() < m_MaxBlockCount);
6233 if(canCreateNewBlock)
6236 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6237 uint32_t newBlockSizeShift = 0;
6238 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools heuristically start below preferred size while blocks are
// still small, to avoid over-committing memory early.
6242 if(m_IsCustomPool ==
false)
6245 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6246 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6248 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6249 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6251 newBlockSize = smallerNewBlockSize;
6252 ++newBlockSizeShift;
6261 size_t newBlockIndex = 0;
6262 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, retry with progressively halved sizes that still fit the request.
6264 if(m_IsCustomPool ==
false)
6266 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6268 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6269 if(smallerNewBlockSize >= vkMemReq.size)
6271 newBlockSize = smallerNewBlockSize;
6272 ++newBlockSizeShift;
6273 res = CreateBlock(newBlockSize, &newBlockIndex);
6282 if(res == VK_SUCCESS)
6284 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6285 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6289 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6290 if(res != VK_SUCCESS)
// Fresh block: allocate from its single all-free range.
6297 VmaAllocationRequest allocRequest;
6298 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6299 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6300 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6301 (*pAllocation)->InitBlockAllocation(
6304 allocRequest.offset,
6310 VMA_HEAVY_ASSERT(pBlock->Validate());
6311 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6312 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 3. Eviction path: make other allocations lost ---
6320 if(canMakeOtherLost)
6322 uint32_t tryIndex = 0;
6323 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6325 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6326 VmaAllocationRequest bestRequest = {};
6327 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find the cheapest eviction plan across all blocks.
6331 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6333 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6334 VMA_ASSERT(pCurrBlock);
6335 VmaAllocationRequest currRequest = {};
6336 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6339 m_BufferImageGranularity,
6346 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6347 if(pBestRequestBlock == VMA_NULL ||
6348 currRequestCost < bestRequestCost)
6350 pBestRequestBlock = pCurrBlock;
6351 bestRequest = currRequest;
6352 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be evicted - cannot do better.
6354 if(bestRequestCost == 0)
6362 if(pBestRequestBlock != VMA_NULL)
6366 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6367 if(res != VK_SUCCESS)
// Eviction may fail if a lost allocation was touched since planning;
// then the whole try is repeated.
6373 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6379 if(pBestRequestBlock->m_Metadata.IsEmpty())
6381 m_HasEmptyBlock =
false;
6384 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6385 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6386 (*pAllocation)->InitBlockAllocation(
6395 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6396 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6397 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6411 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6413 return VK_ERROR_TOO_MANY_OBJECTS;
6417 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block. Keeps at most one empty block alive as
// a cache: if the freed block becomes empty and another empty block already
// exists (and we are above m_MinBlockCount), one of them is destroyed.
// The actual VkDeviceMemory free happens outside the mutex.
6420 void VmaBlockVector::Free(
6421 VmaAllocation hAllocation)
6423 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of lock + AllocationCallbacks.
6427 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6429 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently mapped allocations drop their mapping reference on free.
6431 if(hAllocation->IsPersistentMap())
6433 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6436 pBlock->m_Metadata.Free(hAllocation);
6437 VMA_HEAVY_ASSERT(pBlock->Validate());
6439 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// pBlock became empty after this free.
6442 if(pBlock->m_Metadata.IsEmpty())
// Already have one empty block cached -> this one can go.
6445 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6447 pBlockToDelete = pBlock;
6453 m_HasEmptyBlock =
true;
// pBlock stayed non-empty, but an older cached empty block may now be freed.
6458 else if(m_HasEmptyBlock)
6460 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6461 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6463 pBlockToDelete = pLastBlock;
6464 m_Blocks.pop_back();
6465 m_HasEmptyBlock =
false;
6469 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory is deferred until after the lock.
6474 if(pBlockToDelete != VMA_NULL)
6476 VMA_DEBUG_LOG(
" Deleted empty allocation");
6477 pBlockToDelete->Destroy(m_hAllocator);
6478 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the largest existing block size, scanning from the back and
// stopping early once the preferred size is reached (since nothing larger
// matters to the caller's heuristic).
6482 size_t VmaBlockVector::CalcMaxBlockSize()
const 6485 for(
size_t i = m_Blocks.size(); i--; )
6487 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6488 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (linear search).
6496 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6498 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6500 if(m_Blocks[blockIndex] == pBlock)
6502 VmaVectorRemove(m_Blocks, blockIndex)
// One bubble-sort pass ordering blocks by ascending free space, so that
// allocation attempts hit the fullest blocks first. Called after each free;
// repeated calls converge to fully sorted order.
6509 void VmaBlockVector::IncrementallySortBlocks()
6512 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6514 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6516 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index.
6522 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6524 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6525 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6526 allocInfo.allocationSize = blockSize;
6527 VkDeviceMemory mem = VK_NULL_HANDLE;
6528 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw memory; block takes ownership via Init.
6537 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6541 allocInfo.allocationSize);
6543 m_Blocks.push_back(pBlock);
6544 if(pNewBlockIndex != VMA_NULL)
6546 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON. Custom pools emit their configured
// limits (memory type, block size, min/max/current counts, frame-in-use);
// default pools emit just the preferred block size. Then each block's
// detailed map is appended.
6552 #if VMA_STATS_STRING_ENABLED 6554 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6556 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: full configuration.
6562 json.WriteString(
"MemoryTypeIndex");
6563 json.WriteNumber(m_MemoryTypeIndex);
6565 json.WriteString(
"BlockSize");
6566 json.WriteNumber(m_PreferredBlockSize);
6568 json.WriteString(
"BlockCount");
6569 json.BeginObject(
true);
6570 if(m_MinBlockCount > 0)
6572 json.WriteString(
"Min");
6573 json.WriteNumber((uint64_t)m_MinBlockCount);
6575 if(m_MaxBlockCount < SIZE_MAX)
6577 json.WriteString(
"Max");
6578 json.WriteNumber((uint64_t)m_MaxBlockCount);
6580 json.WriteString(
"Cur");
6581 json.WriteNumber((uint64_t)m_Blocks.size());
6584 if(m_FrameInUseCount > 0)
6586 json.WriteString(
"FrameInUseCount");
6587 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size.
6592 json.WriteString(
"PreferredBlockSize");
6593 json.WriteNumber(m_PreferredBlockSize);
6596 json.WriteString(
"Blocks");
6598 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6600 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (and caches) the defragmentator for this block vector.
6607 #endif // #if VMA_STATS_STRING_ENABLED 6609 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6610 VmaAllocator hAllocator,
6611 uint32_t currentFrameIndex)
6613 if(m_pDefragmentator == VMA_NULL)
6615 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6621 return m_pDefragmentator;
// Runs defragmentation (bounded by maxBytesToMove / maxAllocationsToMove),
// accumulates statistics, then destroys every block that became empty,
// keeping m_MinBlockCount blocks and one cached empty block.
6624 VkResult VmaBlockVector::Defragment(
6626 VkDeviceSize& maxBytesToMove,
6627 uint32_t& maxAllocationsToMove)
// No-op if no defragmentator was ever requested for this vector.
6629 if(m_pDefragmentator == VMA_NULL)
6634 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6637 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Accumulate statistics.
6640 if(pDefragmentationStats != VMA_NULL)
6642 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6643 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6646 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6647 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks; iterate backwards so VmaVectorRemove stays valid.
6653 m_HasEmptyBlock =
false;
6654 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6656 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6657 if(pBlock->m_Metadata.IsEmpty())
6659 if(m_Blocks.size() > m_MinBlockCount)
6661 if(pDefragmentationStats != VMA_NULL)
6664 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6667 VmaVectorRemove(m_Blocks, blockIndex);
6668 pBlock->Destroy(m_hAllocator);
6669 vma_delete(m_hAllocator, pBlock);
// At the minimum block count: keep this empty block as the cached one.
6673 m_HasEmptyBlock =
true;
// Deletes the cached defragmentator, if any.
6681 void VmaBlockVector::DestroyDefragmentator()
6683 if(m_pDefragmentator != VMA_NULL)
6685 vma_delete(m_hAllocator, m_pDefragmentator);
6686 m_pDefragmentator = VMA_NULL;
// Makes lost every evictable allocation in every block of this vector, and
// optionally reports the total count to the caller.
6690 void VmaBlockVector::MakePoolAllocationsLost(
6691 uint32_t currentFrameIndex,
6692 size_t* pLostAllocationCount)
6694 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6695 size_t lostAllocationCount = 0;
6696 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6698 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6700 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6702 if(pLostAllocationCount != VMA_NULL)
6704 *pLostAllocationCount = lostAllocationCount;
// Accumulates per-block statistics of this vector into pStats, filed under
// this vector's memory type and its heap. NOTE(review): the declaration of
// the local `allocationStatInfo` (orig. line ~6720, presumably VmaStatInfo)
// is missing from this extract.
6708 void VmaBlockVector::AddStats(
VmaStats* pStats)
6710 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6711 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
// Reading block metadata — take the vector's mutex for the whole scan.
6713 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6715 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6717 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6719 VMA_HEAVY_ASSERT(pBlock->Validate());
6721 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
// Each block contributes to the global total, its memory type, and its heap.
6722 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6723 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6724 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: records the allocator/block-vector/frame context and creates
// empty allocation/block bookkeeping vectors using the allocator's callbacks.
// NOTE(review): one initializer (orig. line 6738, presumably m_BytesMoved(0))
// is missing from this extract.
6731 VmaDefragmentator::VmaDefragmentator(
6732 VmaAllocator hAllocator,
6733 VmaBlockVector* pBlockVector,
6734 uint32_t currentFrameIndex) :
6735 m_hAllocator(hAllocator),
6736 m_pBlockVector(pBlockVector),
6737 m_CurrentFrameIndex(currentFrameIndex),
6739 m_AllocationsMoved(0),
6740 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6741 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6745 VmaDefragmentator::~VmaDefragmentator()
6747 for(
size_t i = m_Blocks.size(); i--; )
6749 vma_delete(m_hAllocator, m_Blocks[i]);
6753 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6755 AllocationInfo allocInfo;
6756 allocInfo.m_hAllocation = hAlloc;
6757 allocInfo.m_pChanged = pChanged;
6758 m_Allocations.push_back(allocInfo);
// Returns (via *ppMappedData) a CPU pointer to this block's memory, mapping
// it on demand. Prefers, in order: a mapping already made for
// defragmentation, the block's pre-existing persistent mapping, and finally
// a fresh Map() whose pointer is remembered so Unmap() can release it.
// NOTE(review): the intermediate `return VK_SUCCESS;` / final `return res;`
// lines are missing from this extract — confirm against the full source.
6761 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Fast path 1: we already mapped this block for defragmentation.
6764 if(m_pMappedDataForDefragmentation)
6766 *ppMappedData = m_pMappedDataForDefragmentation;
// Fast path 2: the block is persistently mapped by the application.
6771 if(m_pBlock->m_Mapping.GetMappedData())
6773 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
// Slow path: map now and remember the pointer for later Unmap().
6778 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6779 *ppMappedData = m_pMappedDataForDefragmentation;
6783 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6785 if(m_pMappedDataForDefragmentation != VMA_NULL)
6787 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: repeatedly takes the current source
// allocation (scanning blocks from the back) and tries to re-place it in an
// earlier block, memcpy-ing its contents and updating metadata, until the
// byte/allocation budget is exhausted (VK_INCOMPLETE) or no more moves are
// possible. NOTE(review): many lines (early returns, loop framing, braces,
// some CreateAllocationRequest arguments) are missing from this extract.
6791 VkResult VmaDefragmentator::DefragmentRound(
6792 VkDeviceSize maxBytesToMove,
6793 uint32_t maxAllocationsToMove)
6795 if(m_Blocks.empty())
// Source scan starts at the last block; SIZE_MAX means "not chosen yet" and
// underflows to "last allocation" when decremented below.
6800 size_t srcBlockIndex = m_Blocks.size() - 1;
6801 size_t srcAllocIndex = SIZE_MAX;
// Walk backwards to the nearest block that still has allocations to move.
6807 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6809 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Reached the first block with nothing left to move — round is done.
6812 if(srcBlockIndex == 0)
6819 srcAllocIndex = SIZE_MAX;
6824 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6828 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6829 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6831 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6832 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6833 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6834 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front, up to and including the source block.
6837 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6839 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6840 VmaAllocationRequest dstAllocRequest;
6841 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6842 m_CurrentFrameIndex,
6843 m_pBlockVector->GetFrameInUseCount(),
6844 m_pBlockVector->GetBufferImageGranularity(),
6849 &dstAllocRequest) &&
// Only move if the new location is strictly "better" (see MoveMakesSense).
6851 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation never sacrifices live allocations to make room.
6853 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round rather than exceed the caller's limits.
6856 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6857 (m_BytesMoved + size > maxBytesToMove))
6859 return VK_INCOMPLETE;
// Both blocks must be host-mapped to copy the data through the CPU.
6862 void* pDstMappedData = VMA_NULL;
6863 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6864 if(res != VK_SUCCESS)
6869 void* pSrcMappedData = VMA_NULL;
6870 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6871 if(res != VK_SUCCESS)
// Copy the allocation's bytes to the new location (memcpy call — its first
// line is missing from this extract).
6878 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6879 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6880 static_cast<size_t>(size));
// Commit: allocate at destination, free at source, repoint the allocation.
6882 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6883 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6885 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6887 if(allocInfo.m_pChanged != VMA_NULL)
6889 *allocInfo.m_pChanged = VK_TRUE;
6892 ++m_AllocationsMoved;
6893 m_BytesMoved += size;
6895 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous candidate allocation / block.
6903 if(srcAllocIndex > 0)
6909 if(srcBlockIndex > 0)
6912 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info, distributes the
// registered (non-lost) allocations into their blocks, sorts blocks by
// preference as move destinations, then runs up to two DefragmentRound()
// passes and unmaps everything. NOTE(review): several lines (early return,
// braces, sort framing) are missing from this extract.
6922 VkResult VmaDefragmentator::Defragment(
6923 VkDeviceSize maxBytesToMove,
6924 uint32_t maxAllocationsToMove)
// Nothing registered via AddAllocation() — nothing to do.
6926 if(m_Allocations.empty())
// Create one BlockInfo per memory block of the owning block vector.
6932 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6933 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6935 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6936 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6937 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be binary-searched into blocks.
6941 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered allocation into the BlockInfo of the block it lives in,
// skipping allocations that have already become lost.
6944 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6946 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6948 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6950 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6951 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6952 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6954 (*it)->m_Allocations.push_back(allocInfo);
6962 m_Allocations.clear();
// Precompute per-block move heuristics and order allocations biggest-first.
6964 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6966 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6967 pBlockInfo->CalcHasNonMovableAllocations();
6968 pBlockInfo->SortAllocationsBySizeDescecnding();
// Re-sort blocks so preferred move destinations come first.
6972 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Up to 2 rounds: a second pass can exploit space freed by the first.
6975 VkResult result = VK_SUCCESS;
6976 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6978 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings created by EnsureMapping() during the rounds.
6982 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6984 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic: a move is worthwhile only if it goes to an earlier block, or
// (within the same block) to a lower offset. NOTE(review): the `return
// true/false;` bodies of the three branches are missing from this extract;
// presumably earlier-block => true, later-block => false, lower-offset =>
// true — confirm against the full source.
6990 bool VmaDefragmentator::MoveMakesSense(
6991 size_t dstBlockIndex, VkDeviceSize dstOffset,
6992 size_t srcBlockIndex, VkDeviceSize srcOffset)
6994 if(dstBlockIndex < srcBlockIndex)
6998 if(dstBlockIndex > srcBlockIndex)
7002 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (its signature, orig. lines ~7012-7014, is
// missing from this extract). Initializes members from VmaAllocatorCreateInfo,
// imports Vulkan function pointers, queries device/memory properties, applies
// optional per-heap size limits, and creates one default block vector plus a
// dedicated-allocation list per memory type.
7015 m_hDevice(pCreateInfo->device),
7016 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty callbacks so GetAllocationCallbacks() is always valid.
7017 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7018 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7019 m_PreferredLargeHeapBlockSize(0),
7020 m_PhysicalDevice(pCreateInfo->physicalDevice),
7021 m_CurrentFrameIndex(0),
7022 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all POD members/arrays before filling them in.
7026 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7027 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7028 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7030 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7031 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap.
7033 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7035 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7046 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7047 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-provided heap size limits, also shrinking the reported heap
// sizes so block-size heuristics see the limited value.
7054 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7056 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7057 if(limit != VK_WHOLE_SIZE)
7059 m_HeapSizeLimit[heapIndex] = limit;
7060 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7062 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Per memory type: a default block vector (some vma_new argument lines are
// missing from this extract) and an empty dedicated-allocation registry.
7068 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7070 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7072 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7078 GetBufferImageGranularity(),
7083 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7087 VmaAllocator_T::~VmaAllocator_T()
7089 VMA_ASSERT(m_Pools.empty());
7091 for(
size_t i = GetMemoryTypeCount(); i--; )
7093 vma_delete(
this, m_pDedicatedAllocations[i]);
7094 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: statically-linked entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1), then any user-provided overrides, and
// finally asserts that every required pointer is set.
7098 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Step 1: take addresses of the statically linked Vulkan functions.
7100 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7101 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7102 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7103 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7104 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7105 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7106 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7107 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7108 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7109 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7110 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7111 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7112 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7113 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7114 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// The KHR dedicated-allocation entry points are extension functions and must
// be fetched through vkGetDeviceProcAddr rather than linked statically.
7115 if(m_UseKhrDedicatedAllocation)
7117 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7118 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7119 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7120 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Step 2: user-supplied pointers override the static ones, field by field.
7122 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7124 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7125 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7127 if(pVulkanFunctions != VMA_NULL)
7129 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7130 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7131 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7132 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7133 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7134 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7135 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7136 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7137 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7138 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7139 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7140 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7141 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7142 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7143 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7144 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Step 3: validate that every pointer the allocator needs is now non-null.
7147 #undef VMA_COPY_IF_NOT_NULL 7151 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7152 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7153 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7154 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7155 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7156 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7157 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7158 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7159 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7160 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7161 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7162 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7163 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7164 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The KHR pair is only required when the extension is actually in use.
7165 if(m_UseKhrDedicatedAllocation)
7167 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7168 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7172 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7174 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7175 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7176 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7177 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of a specific Vulkan memory type: first decides whether a
// dedicated (own VkDeviceMemory) allocation is preferred, otherwise tries the
// type's block vector, falling back to a dedicated allocation on failure.
// NOTE(review): several lines (createInfo parameter, flag checks, argument
// lists of the inner calls, braces) are missing from this extract.
7180 VkResult VmaAllocator_T::AllocateMemoryOfType(
7181 const VkMemoryRequirements& vkMemReq,
7182 bool dedicatedAllocation,
7183 VkBuffer dedicatedBuffer,
7184 VkImage dedicatedImage,
7186 uint32_t memTypeIndex,
7187 VmaSuballocationType suballocType,
7188 VmaAllocation* pAllocation)
7190 VMA_ASSERT(pAllocation != VMA_NULL);
7191 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// A mapped allocation is impossible on a non-HOST_VISIBLE memory type (the
// enclosing condition's first line is missing from this extract).
7197 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7202 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7203 VMA_ASSERT(blockVector);
// Heuristic: very large requests (> half a block) go to dedicated memory.
7205 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7206 bool preferDedicatedMemory =
7207 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7208 dedicatedAllocation ||
7210 vkMemReq.size > preferredBlockSize / 2;
// Dedicated path is only taken for the default pool (pool == VK_NULL_HANDLE).
7212 if(preferDedicatedMemory &&
7214 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new VkDeviceMemory — fail here.
7223 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7227 return AllocateDedicatedMemory(
// Default path: suballocate from the type's block vector.
7241 VkResult res = blockVector->Allocate(
7243 m_CurrentFrameIndex.load(),
7248 if(res == VK_SUCCESS)
// Block allocation failed and new device memory may not be allocated.
7256 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation after block allocation failed.
7260 res = AllocateDedicatedMemory(
7266 finalCreateInfo.pUserData,
7270 if(res == VK_SUCCESS)
7273 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
// Both paths failed — propagate the error.
7279 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated allocation: its own VkDeviceMemory (optionally chained
// with VkMemoryDedicatedAllocateInfoKHR for a specific buffer/image),
// optionally persistently mapped, then registered in the per-type
// dedicated-allocation list. NOTE(review): some parameter lines (size, map
// flag) and error-return lines are missing from this extract.
7286 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7288 VmaSuballocationType suballocType,
7289 uint32_t memTypeIndex,
7291 bool isUserDataString,
7293 VkBuffer dedicatedBuffer,
7294 VkImage dedicatedImage,
7295 VmaAllocation* pAllocation)
7297 VMA_ASSERT(pAllocation);
7299 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7300 allocInfo.memoryTypeIndex = memTypeIndex;
7301 allocInfo.allocationSize = size;
// Chain the KHR dedicated-allocation struct when the extension is enabled
// and the caller identified the buffer or image this memory is for.
7303 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7304 if(m_UseKhrDedicatedAllocation)
7306 if(dedicatedBuffer != VK_NULL_HANDLE)
// Per the extension, at most one of buffer/image may be set.
7308 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7309 dedicatedAllocInfo.buffer = dedicatedBuffer;
7310 allocInfo.pNext = &dedicatedAllocInfo;
7312 else if(dedicatedImage != VK_NULL_HANDLE)
7314 dedicatedAllocInfo.image = dedicatedImage;
7315 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate through the budget-aware wrapper (honors heap size limits).
7320 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7321 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7324 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optionally map persistently; on failure the fresh memory is released.
7328 void* pMappedData = VMA_NULL;
7331 res = (*m_VulkanFunctions.vkMapMemory)(
7340 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7341 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and record caller user data.
7346 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7347 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7348 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted per-type list so stats/free can find it.
7352 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7353 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7354 VMA_ASSERT(pDedicatedAllocations);
7355 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7358 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR to also learn whether a
// dedicated allocation is required/preferred; otherwise falls back to the
// core entry point and reports false for both flags. NOTE(review): the
// hBuffer parameter line and the else framing are missing from this extract.
7363 void VmaAllocator_T::GetBufferMemoryRequirements(
7365 VkMemoryRequirements& memReq,
7366 bool& requiresDedicatedAllocation,
7367 bool& prefersDedicatedAllocation)
const 7369 if(m_UseKhrDedicatedAllocation)
7371 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7372 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedication hints.
7374 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7376 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7377 memReq2.pNext = &memDedicatedReq;
7379 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7381 memReq = memReq2.memoryRequirements;
7382 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7383 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core function, no dedication information available.
7387 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7388 requiresDedicatedAllocation =
false;
7389 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR (with VkMemoryDedicatedRequirementsKHR
// chained) when the extension is enabled, else the core query with both
// dedication flags reported false. NOTE(review): the hImage parameter line
// and the else framing are missing from this extract.
7393 void VmaAllocator_T::GetImageMemoryRequirements(
7395 VkMemoryRequirements& memReq,
7396 bool& requiresDedicatedAllocation,
7397 bool& prefersDedicatedAllocation)
const 7399 if(m_UseKhrDedicatedAllocation)
7401 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7402 memReqInfo.image = hImage;
7404 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7406 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7407 memReq2.pNext = &memDedicatedReq;
7409 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7411 memReq = memReq2.memoryRequirements;
7412 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7413 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core function, no dedication information available.
7417 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7418 requiresDedicatedAllocation =
false;
7419 prefersDedicatedAllocation =
false;
// General allocation entry point: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// over memory types acceptable to vkMemReq.memoryTypeBits (best first via
// vmaFindMemoryTypeIndex, judging by the retry loop), calling
// AllocateMemoryOfType and masking out failed types. NOTE(review): many
// condition lines (flag tests), the createInfo parameter line, and the
// find-memory-type calls are missing from this extract.
7423 VkResult VmaAllocator_T::AllocateMemory(
7424 const VkMemoryRequirements& vkMemReq,
7425 bool requiresDedicatedAllocation,
7426 bool prefersDedicatedAllocation,
7427 VkBuffer dedicatedBuffer,
7428 VkImage dedicatedImage,
7430 VmaSuballocationType suballocType,
7431 VmaAllocation* pAllocation)
// Invalid flag combination: dedicated memory but never allocate.
7436 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7437 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Invalid flag combination: persistently mapped but can become lost.
7442 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7443 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// When the resource requires dedicated memory, NEVER_ALLOCATE and custom
// pools are both incompatible with that requirement.
7445 if(requiresDedicatedAllocation)
7449 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7450 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7452 if(createInfo.
pool != VK_NULL_HANDLE)
7454 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7455 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7458 if((createInfo.
pool != VK_NULL_HANDLE) &&
7461 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7462 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate entirely to the pool's block vector.
7465 if(createInfo.
pool != VK_NULL_HANDLE)
7467 return createInfo.
pool->m_BlockVector.Allocate(
7469 m_CurrentFrameIndex.load(),
// Default path: try acceptable memory types until one succeeds.
7478 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7479 uint32_t memTypeIndex = UINT32_MAX;
7481 if(res == VK_SUCCESS)
7483 res = AllocateMemoryOfType(
7485 requiresDedicatedAllocation || prefersDedicatedAllocation,
7493 if(res == VK_SUCCESS)
// Allocation from this type failed — exclude it and look for another.
7503 memoryTypeBits &= ~(1u << memTypeIndex);
7506 if(res == VK_SUCCESS)
7508 res = AllocateMemoryOfType(
7510 requiresDedicatedAllocation || prefersDedicatedAllocation,
7518 if(res == VK_SUCCESS)
// No remaining acceptable memory type could satisfy the request.
7528 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: block allocations are returned to their owning block
// vector (the custom pool's or the default per-type one); dedicated
// allocations release their VkDeviceMemory. Already-lost allocations skip
// the memory release. Finally the VmaAllocation_T object itself is destroyed.
// NOTE(review): switch/brace framing lines are missing from this extract.
7539 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7541 VMA_ASSERT(allocation);
// A lost allocation no longer owns memory — only the wrapper is destroyed.
7543 if(allocation->CanBecomeLost() ==
false ||
7544 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7546 switch(allocation->GetType())
7548 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7550 VmaBlockVector* pBlockVector = VMA_NULL;
7551 VmaPool hPool = allocation->GetPool();
// Custom-pool allocations free into the pool's block vector.
7552 if(hPool != VK_NULL_HANDLE)
7554 pBlockVector = &hPool->m_BlockVector;
7558 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7559 pBlockVector = m_pBlockVectors[memTypeIndex];
7561 pBlockVector->Free(allocation);
7564 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7565 FreeDedicatedMemory(allocation);
// Clear user data (may free an owned string copy) before deleting.
7572 allocation->SetUserData(
this, VMA_NULL);
7573 vma_delete(
this, allocation);
// Builds full statistics: initializes all stat-info slots, accumulates the
// default block vectors, custom pools, and dedicated allocations, then
// post-processes totals and per-type/per-heap entries. NOTE(review): the
// bodies of the two InitStatInfo loops and the local allocationStatInfo
// declaration are missing from this extract.
7576 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero/initialize every StatInfo slot before accumulating.
7579 InitStatInfo(pStats->
total);
7580 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7582 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Accumulate the default (non-pool) block vectors, one per memory type.
7586 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7588 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7589 VMA_ASSERT(pBlockVector);
7590 pBlockVector->AddStats(pStats);
// Accumulate every custom pool's block vector (under the pools mutex).
7595 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7596 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7598 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Accumulate dedicated allocations, per memory type under that type's mutex.
7603 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7605 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7606 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7607 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7608 VMA_ASSERT(pDedicatedAllocVector);
7609 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7612 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7613 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7614 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7615 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived fields (averages, min/max) on every populated entry.
7620 VmaPostprocessCalcStatInfo(pStats->
total);
7621 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7622 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7623 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7624 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID of AMD (used to detect AMD GPUs).
7627 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation over an explicit list of allocations:
// distributes movable HOST_VISIBLE block allocations to their block vectors'
// defragmentators, runs Defragment on every default vector and custom pool,
// then tears the defragmentators down. NOTE(review): parameter lines
// (pDefragmentationInfo, pDefragmentationStats), the lost-allocation
// filtering condition, and the budget-extraction lines are missing from this
// extract. Also NOTE(review): `maxBytesToMove = SIZE_MAX` (orig. 7690) uses
// SIZE_MAX for a VkDeviceSize — verify this is intentional in the full source
// (VK_WHOLE_SIZE would be the natural sentinel).
7629 VkResult VmaAllocator_T::Defragment(
7630 VmaAllocation* pAllocations,
7631 size_t allocationCount,
7632 VkBool32* pAllocationsChanged,
// Zero the per-allocation "changed" flags and the output statistics.
7636 if(pAllocationsChanged != VMA_NULL)
7638 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7640 if(pDefragmentationStats != VMA_NULL)
7642 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7645 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Pools are iterated and mutated below — hold the pools mutex throughout.
7647 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7649 const size_t poolCount = m_Pools.size();
// Dispatch each eligible allocation to its block vector's defragmentator.
7652 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7654 VmaAllocation hAlloc = pAllocations[allocIndex];
7656 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block allocations in HOST_VISIBLE memory that are not lost can move.
7658 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7660 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7662 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7664 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7666 const VmaPool hAllocPool = hAlloc->GetPool();
// Pool allocations defragment within their pool's block vector.
7668 if(hAllocPool != VK_NULL_HANDLE)
7670 pAllocBlockVector = &hAllocPool->GetBlockVector();
7675 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7678 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7680 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7681 &pAllocationsChanged[allocIndex] : VMA_NULL;
7682 pDefragmentator->AddAllocation(hAlloc, pChanged);
7686 VkResult result = VK_SUCCESS;
// Without explicit limits, move budgets are effectively unbounded.
7690 VkDeviceSize maxBytesToMove = SIZE_MAX;
7691 uint32_t maxAllocationsToMove = UINT32_MAX;
7692 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation on default vectors of HOST_VISIBLE types, then pools,
// stopping at the first non-success result.
7699 for(uint32_t memTypeIndex = 0;
7700 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7704 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7706 result = m_pBlockVectors[memTypeIndex]->Defragment(
7707 pDefragmentationStats,
7709 maxAllocationsToMove);
7714 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7716 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7717 pDefragmentationStats,
7719 maxAllocationsToMove);
// Cleanup: destroy all defragmentators (pools first, reverse order).
7725 for(
size_t poolIndex = poolCount; poolIndex--; )
7727 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7731 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7733 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7735 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for an allocation. For can-become-lost allocations
// this also "touches" the allocation: it atomically advances the last-use
// frame index (CAS retry loop), reporting zeroed memory fields if the
// allocation is already lost. NOTE(review): several lines (loop framing,
// memoryType/deviceMemory assignments in the lost branch, the retry `else`)
// are missing from this extract.
7742 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7744 if(hAllocation->CanBecomeLost())
7750 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7751 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/userData but no live memory binding.
7754 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7758 pAllocationInfo->
offset = 0;
7759 pAllocationInfo->
size = hAllocation->GetSize();
7761 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: just report the current state.
7764 else if(localLastUseFrameIndex == localCurrFrameIndex)
7766 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7767 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7768 pAllocationInfo->
offset = hAllocation->GetOffset();
7769 pAllocationInfo->
size = hAllocation->GetSize();
7771 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to claim the current frame; on CAS failure the loop
// re-reads and retries.
7776 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7778 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: report state directly, including mapping.
7785 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7786 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7787 pAllocationInfo->
offset = hAllocation->GetOffset();
7788 pAllocationInfo->
size = hAllocation->GetSize();
7789 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7790 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Same CAS loop as in
// GetAllocationInfo but without filling any output struct. NOTE(review): the
// return statements (false when lost, true otherwise, presumably) and loop
// framing are missing from this extract — confirm against the full source.
7794 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
7797 if(hAllocation->CanBecomeLost())
7799 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7800 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7803 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7807 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Attempt to claim the current frame index; retry on CAS failure.
7813 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7815 localLastUseFrameIndex = localCurrFrameIndex;
// Creates a custom pool: constructs a VmaPool_T, pre-allocates its minimum
// block count, and registers it in the sorted m_Pools list. NOTE(review):
// lines that build `newCreateInfo` from *pCreateInfo (orig. ~7830-7840) and
// the error return after block creation failure are missing from this
// extract.
7826 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7828 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7841 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Honor minBlockCount eagerly; roll back the pool object on failure.
7843 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7844 if(res != VK_SUCCESS)
7846 vma_delete(
this, *pPool);
// Register the pool so CalculateStats/Defragment can enumerate it.
7853 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7854 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted m_Pools registry (under
// the pools mutex — note the lock lives in an inner scope so it is released
// before the pool object itself is deleted) and frees the pool.
7860 void VmaAllocator_T::DestroyPool(VmaPool pool)
7864 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7865 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
// Destroying a pool that was never created (or already destroyed) is a bug.
7866 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7869 vma_delete(
this, pool);
// Retrieves statistics for a single custom pool by delegating to its block
// vector, which owns all of the pool's memory blocks.
7872 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7874 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery (GetAllocationInfo/TouchAllocation).
7877 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7879 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector with the current frame index.
// NOTE(review): the pool parameter line (orig. 7883, presumably
// `VmaPool hPool,`) is missing from this extract.
7882 void VmaAllocator_T::MakePoolAllocationsLost(
7884 size_t* pLostAllocationCount)
7886 hPool->m_BlockVector.MakePoolAllocationsLost(
7887 m_CurrentFrameIndex.load(),
7888 pLostAllocationCount);
7891 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7893 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7894 (*pAllocation)->InitLost();
// Budget-aware wrapper over vkAllocateMemory: when the target heap has a
// user-configured size limit, the allocation is only attempted if it fits,
// and the remaining budget is decremented on success (all under the heap
// limit mutex). Fires the user's pfnAllocate callback on success.
// NOTE(review): the final `return res;` and some braces are missing from
// this extract.
7897 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7899 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: check and update the remaining budget atomically.
7902 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7904 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7905 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7907 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7908 if(res == VK_SUCCESS)
7910 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Request exceeds the remaining budget — fail without calling Vulkan.
7915 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: allocate directly.
7920 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Inform the user's device-memory callback about the new allocation.
7923 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7925 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: fires the user's pfnFree callback
// (before the memory is actually freed), calls vkFreeMemory, and returns the
// bytes to the heap's budget if a size limit is configured.
7931 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7933 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7935 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7938 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Restore the freed bytes to the per-heap budget, under the limit mutex.
7940 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7941 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7943 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7944 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole block (reference-counted) and
// offset the returned pointer; dedicated allocations map their own memory.
// NOTE(review): the default switch case and some return/brace lines are
// missing from this extract.
7948 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
// An allocation that can be lost must not be mapped (it may vanish).
7950 if(hAllocation->CanBecomeLost())
7952 return VK_ERROR_MEMORY_MAP_FAILED;
7955 switch(hAllocation->GetType())
7957 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7959 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7960 char *pBytes = VMA_NULL;
// Map the owning block once (ref-counted), then offset into it.
7961 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
7962 if(res == VK_SUCCESS)
7964 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
// Track the per-allocation map count for balanced Unmap() calls.
7965 hAllocation->BlockAllocMap();
7969 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7970 return hAllocation->DedicatedAllocMap(
this, ppData);
7973 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverses a successful Map(): decrements the allocation's map count and
// unmaps the owning block (block allocations) or the dedicated memory.
// NOTE(review): the default switch case / break lines are missing from this
// extract.
7977 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7979 switch(hAllocation->GetType())
7981 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7983 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
// Order mirrors Map(): per-allocation count first, then the block's
// reference-counted unmap.
7984 hAllocation->BlockAllocUnmap();
7985 pBlock->Unmap(
this, 1);
7988 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7989 hAllocation->DedicatedAllocUnmap(
this);
// Frees the VkDeviceMemory of a dedicated allocation: removes it from the
// per-type registry (under that type's mutex), unmaps it if it was mapped,
// and releases the memory through the budget-aware FreeVulkanMemory().
7996 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7998 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8000 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Deregister first, under the per-type dedicated-allocations mutex.
8002 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8003 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8004 VMA_ASSERT(pDedicatedAllocations);
8005 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
// Freeing an allocation that was never registered is a bug.
8006 VMA_ASSERT(success);
8009 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistently mapped dedicated memory must be unmapped before freeing.
8011 if(allocation->GetMappedData() != VMA_NULL)
8013 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8016 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8018 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8021 #if VMA_STATS_STRING_ENABLED 8023 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8025 bool dedicatedAllocationsStarted =
false;
8026 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8028 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8029 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8030 VMA_ASSERT(pDedicatedAllocVector);
8031 if(pDedicatedAllocVector->empty() ==
false)
8033 if(dedicatedAllocationsStarted ==
false)
8035 dedicatedAllocationsStarted =
true;
8036 json.WriteString(
"DedicatedAllocations");
8040 json.BeginString(
"Type ");
8041 json.ContinueString(memTypeIndex);
8046 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8048 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8049 json.BeginObject(
true);
8051 json.WriteString(
"Type");
8052 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8054 json.WriteString(
"Size");
8055 json.WriteNumber(hAlloc->GetSize());
8057 const void* pUserData = hAlloc->GetUserData();
8058 if(pUserData != VMA_NULL)
8060 json.WriteString(
"UserData");
8061 if(hAlloc->IsUserDataString())
8063 json.WriteString((
const char*)pUserData);
8068 json.ContinueString_Pointer(pUserData);
8079 if(dedicatedAllocationsStarted)
8085 bool allocationsStarted =
false;
8086 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8088 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8090 if(allocationsStarted ==
false)
8092 allocationsStarted =
true;
8093 json.WriteString(
"DefaultPools");
8097 json.BeginString(
"Type ");
8098 json.ContinueString(memTypeIndex);
8101 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8104 if(allocationsStarted)
8111 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8112 const size_t poolCount = m_Pools.size();
8115 json.WriteString(
"Pools");
8117 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8119 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8126 #endif // #if VMA_STATS_STRING_ENABLED 8128 static VkResult AllocateMemoryForImage(
8129 VmaAllocator allocator,
8132 VmaSuballocationType suballocType,
8133 VmaAllocation* pAllocation)
8135 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8137 VkMemoryRequirements vkMemReq = {};
8138 bool requiresDedicatedAllocation =
false;
8139 bool prefersDedicatedAllocation =
false;
8140 allocator->GetImageMemoryRequirements(image, vkMemReq,
8141 requiresDedicatedAllocation, prefersDedicatedAllocation);
8143 return allocator->AllocateMemory(
8145 requiresDedicatedAllocation,
8146 prefersDedicatedAllocation,
8149 *pAllocationCreateInfo,
8159 VmaAllocator* pAllocator)
8161 VMA_ASSERT(pCreateInfo && pAllocator);
8162 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8168 VmaAllocator allocator)
8170 if(allocator != VK_NULL_HANDLE)
8172 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8173 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8174 vma_delete(&allocationCallbacks, allocator);
8179 VmaAllocator allocator,
8180 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8182 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8183 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8187 VmaAllocator allocator,
8188 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8190 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8191 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8195 VmaAllocator allocator,
8196 uint32_t memoryTypeIndex,
8197 VkMemoryPropertyFlags* pFlags)
8199 VMA_ASSERT(allocator && pFlags);
8200 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8201 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8205 VmaAllocator allocator,
8206 uint32_t frameIndex)
8208 VMA_ASSERT(allocator);
8209 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8211 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8213 allocator->SetCurrentFrameIndex(frameIndex);
8217 VmaAllocator allocator,
8220 VMA_ASSERT(allocator && pStats);
8221 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8222 allocator->CalculateStats(pStats);
8225 #if VMA_STATS_STRING_ENABLED 8228 VmaAllocator allocator,
8229 char** ppStatsString,
8230 VkBool32 detailedMap)
8232 VMA_ASSERT(allocator && ppStatsString);
8233 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8235 VmaStringBuilder sb(allocator);
8237 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8241 allocator->CalculateStats(&stats);
8243 json.WriteString(
"Total");
8244 VmaPrintStatInfo(json, stats.
total);
8246 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8248 json.BeginString(
"Heap ");
8249 json.ContinueString(heapIndex);
8253 json.WriteString(
"Size");
8254 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8256 json.WriteString(
"Flags");
8257 json.BeginArray(
true);
8258 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8260 json.WriteString(
"DEVICE_LOCAL");
8266 json.WriteString(
"Stats");
8267 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8270 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8272 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8274 json.BeginString(
"Type ");
8275 json.ContinueString(typeIndex);
8280 json.WriteString(
"Flags");
8281 json.BeginArray(
true);
8282 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8283 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8285 json.WriteString(
"DEVICE_LOCAL");
8287 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8289 json.WriteString(
"HOST_VISIBLE");
8291 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8293 json.WriteString(
"HOST_COHERENT");
8295 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8297 json.WriteString(
"HOST_CACHED");
8299 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8301 json.WriteString(
"LAZILY_ALLOCATED");
8307 json.WriteString(
"Stats");
8308 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8317 if(detailedMap == VK_TRUE)
8319 allocator->PrintDetailedMap(json);
8325 const size_t len = sb.GetLength();
8326 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8329 memcpy(pChars, sb.GetData(), len);
8332 *ppStatsString = pChars;
8336 VmaAllocator allocator,
8339 if(pStatsString != VMA_NULL)
8341 VMA_ASSERT(allocator);
8342 size_t len = strlen(pStatsString);
8343 vma_delete_array(allocator, pStatsString, len + 1);
8347 #endif // #if VMA_STATS_STRING_ENABLED 8353 VmaAllocator allocator,
8354 uint32_t memoryTypeBits,
8356 uint32_t* pMemoryTypeIndex)
8358 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8359 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8360 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8367 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8371 switch(pAllocationCreateInfo->
usage)
8376 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8379 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8382 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8383 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8386 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8387 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8393 *pMemoryTypeIndex = UINT32_MAX;
8394 uint32_t minCost = UINT32_MAX;
8395 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8396 memTypeIndex < allocator->GetMemoryTypeCount();
8397 ++memTypeIndex, memTypeBit <<= 1)
8400 if((memTypeBit & memoryTypeBits) != 0)
8402 const VkMemoryPropertyFlags currFlags =
8403 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8405 if((requiredFlags & ~currFlags) == 0)
8408 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8410 if(currCost < minCost)
8412 *pMemoryTypeIndex = memTypeIndex;
8422 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8426 VmaAllocator allocator,
8427 const VkBufferCreateInfo* pBufferCreateInfo,
8429 uint32_t* pMemoryTypeIndex)
8431 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8432 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8433 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8434 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8436 const VkDevice hDev = allocator->m_hDevice;
8437 VkBuffer hBuffer = VK_NULL_HANDLE;
8438 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8439 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8440 if(res == VK_SUCCESS)
8442 VkMemoryRequirements memReq = {};
8443 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8444 hDev, hBuffer, &memReq);
8448 memReq.memoryTypeBits,
8449 pAllocationCreateInfo,
8452 allocator->GetVulkanFunctions().vkDestroyBuffer(
8453 hDev, hBuffer, allocator->GetAllocationCallbacks());
8459 VmaAllocator allocator,
8460 const VkImageCreateInfo* pImageCreateInfo,
8462 uint32_t* pMemoryTypeIndex)
8464 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8465 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8466 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8467 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8469 const VkDevice hDev = allocator->m_hDevice;
8470 VkImage hImage = VK_NULL_HANDLE;
8471 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8472 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8473 if(res == VK_SUCCESS)
8475 VkMemoryRequirements memReq = {};
8476 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8477 hDev, hImage, &memReq);
8481 memReq.memoryTypeBits,
8482 pAllocationCreateInfo,
8485 allocator->GetVulkanFunctions().vkDestroyImage(
8486 hDev, hImage, allocator->GetAllocationCallbacks());
8492 VmaAllocator allocator,
8496 VMA_ASSERT(allocator && pCreateInfo && pPool);
8498 VMA_DEBUG_LOG(
"vmaCreatePool");
8500 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8502 return allocator->CreatePool(pCreateInfo, pPool);
8506 VmaAllocator allocator,
8509 VMA_ASSERT(allocator);
8511 if(pool == VK_NULL_HANDLE)
8516 VMA_DEBUG_LOG(
"vmaDestroyPool");
8518 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8520 allocator->DestroyPool(pool);
8524 VmaAllocator allocator,
8528 VMA_ASSERT(allocator && pool && pPoolStats);
8530 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8532 allocator->GetPoolStats(pool, pPoolStats);
8536 VmaAllocator allocator,
8538 size_t* pLostAllocationCount)
8540 VMA_ASSERT(allocator && pool);
8542 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8544 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8548 VmaAllocator allocator,
8549 const VkMemoryRequirements* pVkMemoryRequirements,
8551 VmaAllocation* pAllocation,
8554 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8556 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8558 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8560 VkResult result = allocator->AllocateMemory(
8561 *pVkMemoryRequirements,
8567 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8570 if(pAllocationInfo && result == VK_SUCCESS)
8572 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8579 VmaAllocator allocator,
8582 VmaAllocation* pAllocation,
8585 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8587 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8589 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8591 VkMemoryRequirements vkMemReq = {};
8592 bool requiresDedicatedAllocation =
false;
8593 bool prefersDedicatedAllocation =
false;
8594 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8595 requiresDedicatedAllocation,
8596 prefersDedicatedAllocation);
8598 VkResult result = allocator->AllocateMemory(
8600 requiresDedicatedAllocation,
8601 prefersDedicatedAllocation,
8605 VMA_SUBALLOCATION_TYPE_BUFFER,
8608 if(pAllocationInfo && result == VK_SUCCESS)
8610 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8617 VmaAllocator allocator,
8620 VmaAllocation* pAllocation,
8623 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8625 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8627 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8629 VkResult result = AllocateMemoryForImage(
8633 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8636 if(pAllocationInfo && result == VK_SUCCESS)
8638 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8645 VmaAllocator allocator,
8646 VmaAllocation allocation)
8648 VMA_ASSERT(allocator && allocation);
8650 VMA_DEBUG_LOG(
"vmaFreeMemory");
8652 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8654 allocator->FreeMemory(allocation);
8658 VmaAllocator allocator,
8659 VmaAllocation allocation,
8662 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8664 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8666 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8670 VmaAllocator allocator,
8671 VmaAllocation allocation)
8673 VMA_ASSERT(allocator && allocation);
8675 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8677 return allocator->TouchAllocation(allocation);
8681 VmaAllocator allocator,
8682 VmaAllocation allocation,
8685 VMA_ASSERT(allocator && allocation);
8687 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8689 allocation->SetUserData(allocator, pUserData);
8693 VmaAllocator allocator,
8694 VmaAllocation* pAllocation)
8696 VMA_ASSERT(allocator && pAllocation);
8698 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8700 allocator->CreateLostAllocation(pAllocation);
8704 VmaAllocator allocator,
8705 VmaAllocation allocation,
8708 VMA_ASSERT(allocator && allocation && ppData);
8710 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8712 return allocator->Map(allocation, ppData);
8716 VmaAllocator allocator,
8717 VmaAllocation allocation)
8719 VMA_ASSERT(allocator && allocation);
8721 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8723 allocator->Unmap(allocation);
8727 VmaAllocator allocator,
8728 VmaAllocation* pAllocations,
8729 size_t allocationCount,
8730 VkBool32* pAllocationsChanged,
8734 VMA_ASSERT(allocator && pAllocations);
8736 VMA_DEBUG_LOG(
"vmaDefragment");
8738 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8740 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8744 VmaAllocator allocator,
8745 const VkBufferCreateInfo* pBufferCreateInfo,
8748 VmaAllocation* pAllocation,
8751 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8753 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8755 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8757 *pBuffer = VK_NULL_HANDLE;
8758 *pAllocation = VK_NULL_HANDLE;
8761 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8762 allocator->m_hDevice,
8764 allocator->GetAllocationCallbacks(),
8769 VkMemoryRequirements vkMemReq = {};
8770 bool requiresDedicatedAllocation =
false;
8771 bool prefersDedicatedAllocation =
false;
8772 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8773 requiresDedicatedAllocation, prefersDedicatedAllocation);
8777 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8779 VMA_ASSERT(vkMemReq.alignment %
8780 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8782 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8784 VMA_ASSERT(vkMemReq.alignment %
8785 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8787 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8789 VMA_ASSERT(vkMemReq.alignment %
8790 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8794 res = allocator->AllocateMemory(
8796 requiresDedicatedAllocation,
8797 prefersDedicatedAllocation,
8800 *pAllocationCreateInfo,
8801 VMA_SUBALLOCATION_TYPE_BUFFER,
8806 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8807 allocator->m_hDevice,
8809 (*pAllocation)->GetMemory(),
8810 (*pAllocation)->GetOffset());
8814 if(pAllocationInfo != VMA_NULL)
8816 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8820 allocator->FreeMemory(*pAllocation);
8821 *pAllocation = VK_NULL_HANDLE;
8822 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8823 *pBuffer = VK_NULL_HANDLE;
8826 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8827 *pBuffer = VK_NULL_HANDLE;
8834 VmaAllocator allocator,
8836 VmaAllocation allocation)
8838 if(buffer != VK_NULL_HANDLE)
8840 VMA_ASSERT(allocator);
8842 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8844 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8846 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8848 allocator->FreeMemory(allocation);
8853 VmaAllocator allocator,
8854 const VkImageCreateInfo* pImageCreateInfo,
8857 VmaAllocation* pAllocation,
8860 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8862 VMA_DEBUG_LOG(
"vmaCreateImage");
8864 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8866 *pImage = VK_NULL_HANDLE;
8867 *pAllocation = VK_NULL_HANDLE;
8870 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8871 allocator->m_hDevice,
8873 allocator->GetAllocationCallbacks(),
8877 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8878 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8879 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8882 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8886 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8887 allocator->m_hDevice,
8889 (*pAllocation)->GetMemory(),
8890 (*pAllocation)->GetOffset());
8894 if(pAllocationInfo != VMA_NULL)
8896 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8900 allocator->FreeMemory(*pAllocation);
8901 *pAllocation = VK_NULL_HANDLE;
8902 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8903 *pImage = VK_NULL_HANDLE;
8906 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8907 *pImage = VK_NULL_HANDLE;
8914 VmaAllocator allocator,
8916 VmaAllocation allocation)
8918 if(image != VK_NULL_HANDLE)
8920 VMA_ASSERT(allocator);
8922 VMA_DEBUG_LOG(
"vmaDestroyImage");
8924 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8926 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8928 allocator->FreeMemory(allocation);
8932 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:897
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1151
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:922
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:907
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1108
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:901
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1457
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:919
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1632
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1327
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1381
Definition: vk_mem_alloc.h:1188
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:890
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1226
Definition: vk_mem_alloc.h:1135
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:931
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:984
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:916
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1139
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1049
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:904
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1048
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:912
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1636
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:948
VmaStatInfo total
Definition: vk_mem_alloc.h:1058
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1644
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1210
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1627
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:905
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:832
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:925
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1335
Definition: vk_mem_alloc.h:1329
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1467
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:902
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1247
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1351
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1387
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:888
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1338
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1086
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1622
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1640
Definition: vk_mem_alloc.h:1125
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1234
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:903
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1054
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:838
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:859
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:864
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1642
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1221
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1397
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:898
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1037
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1346
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:851
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1195
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1050
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:855
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1341
Definition: vk_mem_alloc.h:1134
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1216
Definition: vk_mem_alloc.h:1207
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1040
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:900
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1359
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:934
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1390
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1205
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1240
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:972
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1056
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1175
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1049
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:909
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:853
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:908
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1373
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1481
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:928
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1049
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1046
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameters of an existing VmaPool.
Definition: vk_mem_alloc.h:1378
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1462
Definition: vk_mem_alloc.h:1203
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1638
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:896
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:911
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1044
Definition: vk_mem_alloc.h:1091
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1331
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1042
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:906
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:910
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1162
Definition: vk_mem_alloc.h:1118
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1476
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:886
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:899
Parameters of VmaAllocation objects that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1443
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1309
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1050
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
TODO finish documentation...
Definition: vk_mem_alloc.h:1201
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1057
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1384
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1050
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1448