#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#include <vulkan/vulkan.h>

VK_DEFINE_HANDLE(VmaAllocator)
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

/// Creates Allocator object.
VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator);

/// Destroys allocator object.
void vmaDestroyAllocator(
    VmaAllocator allocator);

void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats);

#ifndef VMA_STATS_STRING_ENABLED
#define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

VK_DEFINE_HANDLE(VmaPool)
VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/// Allocates Vulkan device memory and creates #VmaPool object.
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool);

/// Destroys #VmaPool object and frees Vulkan device memory.
void vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool);

/// Retrieves statistics of existing #VmaPool object.
void vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats);

/// Marks all allocations in given pool as lost if they are not used in current frame
/// or VmaPoolCreateInfo::frameInUseCount frames back from now.
void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);
VK_DEFINE_HANDLE(VmaAllocation)

VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Function similar to vmaAllocateMemoryForBuffer().
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);

/// Returns current information about specified allocation.
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo);

/// Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
VkBool32 vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation);

void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData);

/// Creates new allocation that is in lost state from the beginning.
void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation);

VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData);

void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation);

/// Compacts memory by moving allocations.
VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Destroys Vulkan buffer and frees allocated memory.
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation);

/// Function similar to vmaCreateBuffer().
VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Destroys Vulkan image and frees allocated memory.
void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation);
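/*
Illustrative sketch (not part of the original text): typical use of the
vmaCreateBuffer()/vmaDestroyBuffer() pair declared above. `allocator` is
assumed to be a valid VmaAllocator created by the caller.

\code
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buffer;
VmaAllocation allocation;
vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buffer, &allocation, nullptr);

// ... use the buffer ...

vmaDestroyBuffer(allocator, buffer, allocation);
\endcode
*/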
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#ifdef __INTELLISENSE__
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

/*******************************************************************************
CONFIGURATION SECTION
*******************************************************************************/

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if VMA_USE_STL_CONTAINERS
#define VMA_USE_STL_VECTOR 1
#define VMA_USE_STL_UNORDERED_MAP 1
#define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
#include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
#include <unordered_map>
#endif

#if VMA_USE_STL_LIST
#include <list>
#endif

#include <algorithm>
#include <atomic> // for std::atomic
#include <mutex>  // for std::mutex

#if !defined(_WIN32) && !defined(__APPLE__)
#include <malloc.h> // for aligned_alloc()
#endif

#define VMA_NULL nullptr

#if defined(__APPLE__) || defined(__ANDROID__)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif

// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif

#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED

#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        VmaMutex() { }
        ~VmaMutex() { }
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
    // If 1, the allocator prefers the smallest suitable free range (best-fit);
    // otherwise the first suitable one (first-fit).
    #define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all suballocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin between suballocations, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set to 1 to enable a single mutex protecting all entry calls to the library.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };

// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}

// Aligns given value up to nearest multiply of align value.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
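/*
Illustrative values (not part of the original text) for the two helpers above,
assuming T = VkDeviceSize:

\code
VmaAlignUp<VkDeviceSize>(11, 8);  // == 16: next multiple of 8 not smaller than 11
VmaAlignUp<VkDeviceSize>(16, 8);  // == 16: already-aligned values are unchanged
VmaRoundDiv<VkDeviceSize>(7, 2);  // == 4:  division rounded to the nearest integer
\endcode

Note that the VmaAlignUp formula works for any positive alignment, not only
powers of two.
*/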
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy overlapping pages.
ResourceA must be in less memory offset than ResourceB.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
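/*
Illustrative example (not part of the original text): assuming a power-of-two
pageSize such as 4096, which is what the bit masking above relies on, the
function compares the page that resource A ends on with the page that resource
B starts on:

\code
// A occupies [4000, 4095], B starts at 4096 - B begins on the next page:
VmaBlocksOnSamePage(4000, 96, 4096, 4096);   // false
// A occupies [4000, 4100], so its last byte is on page [4096, 8191], like B at 4160:
VmaBlocksOnSamePage(4000, 101, 4160, 4096);  // true
\endcode
*/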
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and the other one is optimal image.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex)
        {
            m_pMutex->Lock();
        }
    }

    ~VmaMutexLock()
    {
        if(m_pMutex)
        {
            m_pMutex->Unlock();
        }
    }

private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;

/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.
Returned value is the found element, if present in the collection, or end of it
if not found.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
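/*
Illustrative usage (not part of the original text): the helper behaves like a
lower_bound-style binary search over a range already sorted with respect to
`cmp`.

\code
uint32_t sorted[] = { 1, 3, 3, 7, 9 };
uint32_t* it = VmaBinaryFindFirstNotLess(
    sorted, sorted + 5, 5u,
    [](uint32_t lhs, uint32_t rhs) { return lhs < rhs; });
// it points at 7 (index 3) - the first element that is not less than 5.
\endcode
*/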
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
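/*
Illustrative usage (not part of the original text) of the placement-new helpers
above; `callbacks` stands for a `const VkAllocationCallbacks*` that may be null,
in which case VMA_SYSTEM_ALIGNED_MALLOC / VMA_SYSTEM_FREE are used instead.

\code
struct Foo { int x; Foo(int x) : x(x) { } };

Foo* foo = vma_new(callbacks, Foo)(42);        // allocate + construct one object
vma_delete(callbacks, foo);                    // destruct + free

int* arr = vma_new_array(callbacks, int, 16);  // raw storage for 16 ints (POD use only)
vma_delete_array(callbacks, arr, 16);          // runs destructors (no-ops for int) + frees
\endcode
*/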
// STL-compatible allocator that forwards all allocations to VkAllocationCallbacks.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
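/*
Illustrative usage (not part of the original text): VmaStlAllocator exposes the
minimal C++11 Allocator interface (value_type, allocate, deallocate), so it can
plug user-provided VkAllocationCallbacks into standard containers as well as
into the VmaVector/VmaList/VmaMap containers defined below. `callbacks` is
assumed to be a `const VkAllocationCallbacks*` (possibly null).

\code
VmaStlAllocator<uint32_t> alloc(callbacks);
std::vector<uint32_t, VmaStlAllocator<uint32_t> > v(alloc);
v.push_back(42); // memory comes from pfnAllocation, or aligned malloc if callbacks == null
\endcode
*/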
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not used. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if(it != vector.data() + vector.size() && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    else
    {
        return vector.size();
    }
}
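/*
Illustrative usage (not part of the original text): together with
VmaBinaryFindFirstNotLess, the three helpers above implement a sorted-vector
"flat set": insertion keeps the vector ordered, lookup and removal are binary
searches, and find returns vector.size() when the value is absent. The
allocator uses the same pattern internally, e.g. for containers sorted by
pointer value.

\code
struct Less { bool operator()(int a, int b) const { return a < b; } };

VmaStlAllocator<int> alloc(callbacks);
VmaVector<int, VmaStlAllocator<int> > vec(alloc);
VmaVectorInsertSorted<Less>(vec, 7);
VmaVectorInsertSorted<Less>(vec, 3);
VmaVectorInsertSorted<Less>(vec, 9);           // vec is now { 3, 7, 9 }
size_t i = VmaVectorFindSorted<Less>(vec, 7);  // i == 1
bool removed = VmaVectorRemoveSorted<Less>(vec, 3); // true, vec == { 7, 9 }
\endcode
*/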
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
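/*
Illustrative usage (not part of the original text): VmaPoolAllocator hands out
fixed-size objects from arrays of `itemsPerBlock` items. Free slots inside each
block form a singly-linked list through Item::NextFreeIndex, so Alloc() is O(1)
per block and Free() only has to find the owning block. The doubly linked list
defined next uses it for its list items.

\code
VmaPoolAllocator<uint64_t> pool(callbacks, 128); // 128 items per block
uint64_t* a = pool.Alloc();
uint64_t* b = pool.Alloc();
pool.Free(a);               // the slot becomes the head of the block's free list
uint64_t* c = pool.Alloc(); // with a single block this reuses the slot freed above
pool.Clear();               // releases all blocks at once
\endcode
*/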
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared not defined, to block copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};
#endif // #if VMA_USE_STL_LIST

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector. */
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP

class VmaDeviceMemoryBlock;
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with persistent mapping.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of a VmaDeviceMemoryBlock that is either assigned and
returned as allocated memory or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
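/*
Illustrative note (not part of the original text): CalcCost() lets the allocator
compare candidate placements when existing allocations may be made lost. Each
sacrificed allocation is charged a flat VMA_LOST_ALLOCATION_COST (1 MiB
equivalent) on top of the bytes it occupies, so a candidate that fits without
sacrificing anything always wins.

\code
// Candidate A fits into an existing free range:
//   sumItemSize == 0, itemsToMakeLostCount == 0      -> CalcCost() == 0
// Candidate B must make one 64 KiB allocation lost:
//   sumItemSize == 65536, itemsToMakeLostCount == 1  -> CalcCost() == 65536 + 1048576 == 1114112
\endcode
*/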
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    ~VmaBlockMetadata();
    void Init(VkDeviceSize size);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;
    VkDeviceSize GetSize() const { return m_Size; }
    size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    VkDeviceSize GetUnusedRangeSizeMax() const;
    // Returns true if this block is empty - contains only a single free suballocation.
    bool IsEmpty() const;

    void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    // Creates trivial request for case when block is empty.
    void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Makes actual allocation based on request. Request must already be checked and valid.
    void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    // Frees suballocation assigned to given memory region.
    void Free(const VmaAllocation allocation);
    void FreeAtOffset(VkDeviceSize offset);

private:
    VkDeviceSize m_Size;
    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};

/*
Represents the memory mapping state of a single VkDeviceMemory block, with
reference counting to support nested Map()/Unmap() calls.
*/
class VmaDeviceMemoryMapping
{
public:
    VmaDeviceMemoryMapping();
    ~VmaDeviceMemoryMapping();

    void* GetMappedData() const { return m_pMappedData; }

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData);
    void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);

private:
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};

/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, #VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
public:
    uint32_t m_MemoryTypeIndex;
    VkDeviceMemory m_hMemory;
    VmaDeviceMemoryMapping m_Mapping;
    VmaBlockMetadata m_Metadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);
};

struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VmaBlockVector(
        VmaAllocator hAllocator,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty() const { return m_Blocks.empty(); }

    VkResult Allocate(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    void Free(
        VmaAllocation hAllocation);

    // Adds statistics of this BlockVector to pStats.
    void AddStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);

    VmaDefragmentator* EnsureDefragmentator(
        VmaAllocator hAllocator,
        uint32_t currentFrameIndex);

    VkResult Defragment(
        VmaDefragmentationStats* pDefragmentationStats,
        VkDeviceSize& maxBytesToMove,
        uint32_t& maxAllocationsToMove);

    void DestroyDefragmentator();

private:
    friend class VmaDefragmentator;

    const VmaAllocator m_hAllocator;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    VMA_MUTEX m_Mutex;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    /* There can be at most one allocation that is completely empty - a
    hysteresis to avoid pessimistic case of alternating creation and destruction
    of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    VmaDefragmentator* m_pDefragmentator;

    size_t CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
};

class VmaPool_T
{
public:
    VmaBlockVector m_BlockVector;

    // Takes ownership.
    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo);
    ~VmaPool_T();

    VmaBlockVector& GetBlockVector() { return m_BlockVector; }

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif
};

class VmaDefragmentator
{
    const VmaAllocator m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    uint32_t m_CurrentFrameIndex;
    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
    };

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    // Used between AddAllocation and Defragment.
    VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

    struct BlockInfo
    {
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks),
            m_pMappedDataForDefragmentation(VMA_NULL)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescecnding()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
        void Unmap(VmaAllocator hAllocator);

    private:
        // Not null if mapped for defragmentation only, not originally mapped.
        void* m_pMappedDataForDefragmentation;
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);

public:
    VmaDefragmentator(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex);

    ~VmaDefragmentator();

    VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);

    VkResult Defragment(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);
};
// Main allocator object.
struct VmaAllocator_T
{
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;

    // Number of bytes free out of limit, or VK_WHOLE_SIZE if no limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Main deallocation function.
    void FreeMemory(const VmaAllocation allocation);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult Defragment(
        VmaAllocation* pAllocations,
        size_t allocationCount,
        VkBool32* pAllocationsChanged,
        const VmaDefragmentationInfo* pDefragmentationInfo,
        VmaDefragmentationStats* pDefragmentationStats);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    bool TouchAllocation(VmaAllocation hAllocation);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;

    VmaVulkanFunctions m_VulkanFunctions;

    void ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions);

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        const VkMemoryRequirements& vkMemReq,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for a single allocation.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaAllocation* pAllocation);

    // Frees a dedicated allocation and unregisters it.
    void FreeDedicatedMemory(VmaAllocation allocation);
};
////////////////////////////////////////////////////////////////////////////////
// Memory allocation #2 after VmaAllocator_T definition

static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
            ptr[i].~T();
        VmaFree(hAllocator, ptr);
    }
}

////////////////////////////////////////////////////////////////////////////////
// VmaStringBuilder

#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    VmaUint32ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    VmaUint64ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// VmaJsonWriter

#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}
void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}
void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
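/*
Illustrative sketch (not part of the original text) of how VmaStringBuilder and
VmaJsonWriter cooperate; vmaBuildStatsString() drives the same machinery to
produce its JSON dump. `allocator` is assumed to be a valid VmaAllocator.

\code
VmaStringBuilder sb(allocator);
{
    VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
    json.BeginObject();
    json.WriteString("Heap0");
    json.BeginObject(true);            // nested single-line object
    json.WriteString("Size");
    json.WriteNumber((uint64_t)268435456);
    json.EndObject();
    json.EndObject();
}
// sb.GetData() now contains (indentation is two spaces):
// {
//   "Heap0": {"Size": 268435456}
// }
\endcode
*/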
#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// VmaAllocation_T

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}
VkDeviceSize VmaAllocation_T::GetOffset() const
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:

VkDeviceMemory VmaAllocation_T::GetMemory() const
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->m_hMemory;
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    return VK_NULL_HANDLE;

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_MemoryTypeIndex;

void* VmaAllocation_T::GetMappedData() const
    case ALLOCATION_TYPE_BLOCK:
        void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
        VMA_ASSERT(pBlockData != VMA_NULL);
        return (char*)pBlockData + m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;

bool VmaAllocation_T::CanBecomeLost() const
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:

VmaPool VmaAllocation_T::GetPool() const
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    VMA_ASSERT(CanBecomeLost());
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
    if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
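// Illustrative sketch (not part of the library): the frame-index comparison in MakeLost()
// above is driven by the application through the public interface of this header. The
// entry points used below match that interface; the render-loop details and the
// "recreate" comment are assumptions about a typical caller.
static void ExampleLostAllocationFrame(VmaAllocator allocator, VmaAllocation allocation, uint32_t frameIndex)
{
    // Tell the allocator which frame is currently being recorded. Allocations created
    // with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT compare their last-use frame index
    // plus frameInUseCount against this value, exactly as MakeLost() does above.
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // Before using such an allocation, check whether it has been made lost.
    VmaAllocationInfo info;
    vmaGetAllocationInfo(allocator, allocation, &info);
    if(info.deviceMemory == VK_NULL_HANDLE)
    {
        // The allocation was lost: the resource bound to it must be recreated here.
    }
}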
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
void VmaAllocation_T::BlockAllocMap()
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");

void VmaAllocation_T::BlockAllocUnmap()
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
        *ppData = m_DedicatedAllocation.m_pMappedData;
    VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
    return VK_ERROR_MEMORY_MAP_FAILED;
    VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
        hAllocator->m_hDevice,
        m_DedicatedAllocation.m_hMemory,
    if(result == VK_SUCCESS)
        m_DedicatedAllocation.m_pMappedData = *ppData;

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
        m_DedicatedAllocation.m_pMappedData = VMA_NULL;
        (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory);
    VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
#if VMA_STATS_STRING_ENABLED

static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
    json.WriteString("Blocks");
    json.WriteString("Allocations");
    json.WriteString("UnusedRanges");
    json.WriteString("UsedBytes");
    json.WriteString("UnusedBytes");
    json.WriteString("AllocationSize");
    json.BeginObject(true);
    json.WriteString("Min");
    json.WriteString("Avg");
    json.WriteString("Max");
    json.WriteString("UnusedRangeSize");
    json.BeginObject(true);
    json.WriteString("Min");
    json.WriteString("Avg");
    json.WriteString("Max");

#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
        return lhs->size < rhs->size;
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
        return lhs->size < rhsSize;
5007 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
5011 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5012 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5016 VmaBlockMetadata::~VmaBlockMetadata()
5020 void VmaBlockMetadata::Init(VkDeviceSize size)
5024 m_SumFreeSize = size;
5026 VmaSuballocation suballoc = {};
5027 suballoc.offset = 0;
5028 suballoc.size = size;
5029 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5030 suballoc.hAllocation = VK_NULL_HANDLE;
5032 m_Suballocations.push_back(suballoc);
5033 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5035 m_FreeSuballocationsBySize.push_back(suballocItem);
bool VmaBlockMetadata::Validate() const
    if(m_Suballocations.empty())
5046 VkDeviceSize calculatedOffset = 0;
5048 uint32_t calculatedFreeCount = 0;
5050 VkDeviceSize calculatedSumFreeSize = 0;
5053 size_t freeSuballocationsToRegister = 0;
    bool prevFree = false;
5057 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5058 suballocItem != m_Suballocations.cend();
5061 const VmaSuballocation& subAlloc = *suballocItem;
5064 if(subAlloc.offset != calculatedOffset)
5069 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5071 if(prevFree && currFree)
5076 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5083 calculatedSumFreeSize += subAlloc.size;
5084 ++calculatedFreeCount;
5085 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5087 ++freeSuballocationsToRegister;
5092 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5096 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5102 calculatedOffset += subAlloc.size;
5103 prevFree = currFree;
5108 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5113 VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5116 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5119 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5124 if(suballocItem->size < lastSize)
5129 lastSize = suballocItem->size;
5133 if(!ValidateFreeSuballocationList() ||
5134 (calculatedOffset != m_Size) ||
5135 (calculatedSumFreeSize != m_SumFreeSize) ||
5136 (calculatedFreeCount != m_FreeCount))
VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax() const
    if(!m_FreeSuballocationsBySize.empty())
        return m_FreeSuballocationsBySize.back()->size;

bool VmaBlockMetadata::IsEmpty() const
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);

void VmaBlockMetadata::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)

void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    inoutStats.size += m_Size;
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
    json.WriteString("TotalBytes");
    json.WriteNumber(m_Size);
    json.WriteString("UnusedBytes");
    json.WriteNumber(m_SumFreeSize);
    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
    json.WriteString("UnusedRanges");
    json.WriteNumber(m_FreeCount);
    json.WriteString("Suballocations");
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
        json.BeginObject(true);
        json.WriteString("Type");
        json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
        json.WriteString("Size");
        json.WriteNumber(suballocItem->size);
        json.WriteString("Offset");
        json.WriteNumber(suballocItem->offset);
        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
            const void* pUserData = suballocItem->hAllocation->GetUserData();
            if(pUserData != VMA_NULL)
                json.WriteString("UserData");
                if(suballocItem->hAllocation->IsUserDataString())
                    json.WriteString((const char*)pUserData);
                json.ContinueString_Pointer(pUserData);
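// Illustrative sketch (not part of the library): PrintDetailedMap() above is what ultimately
// ends up in the JSON produced by the public statistics functions declared earlier in this
// header. Typical usage for dumping the map for offline inspection:
static void ExampleDumpStatsJson(VmaAllocator allocator)
{
    char* pStatsString = VMA_NULL;
    // detailedMap = VK_TRUE also emits the per-suballocation "Suballocations" arrays written
    // above; VK_FALSE limits the output to the aggregated counters.
    vmaBuildStatsString(allocator, &pStatsString, VK_TRUE);
    // ... write pStatsString to a file or a log here ...
    vmaFreeStatsString(allocator, pStatsString);
}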
#endif // #if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5282 VMA_ASSERT(IsEmpty());
5283 pAllocationRequest->offset = 0;
5284 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5285 pAllocationRequest->sumItemSize = 0;
5286 pAllocationRequest->item = m_Suballocations.begin();
5287 pAllocationRequest->itemsToMakeLostCount = 0;
5290 bool VmaBlockMetadata::CreateAllocationRequest(
5291 uint32_t currentFrameIndex,
5292 uint32_t frameInUseCount,
5293 VkDeviceSize bufferImageGranularity,
5294 VkDeviceSize allocSize,
5295 VkDeviceSize allocAlignment,
5296 VmaSuballocationType allocType,
5297 bool canMakeOtherLost,
5298 VmaAllocationRequest* pAllocationRequest)
5300 VMA_ASSERT(allocSize > 0);
5301 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5302 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5303 VMA_HEAVY_ASSERT(Validate());
if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
5312 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5313 if(freeSuballocCount > 0)
VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
5319 m_FreeSuballocationsBySize.data(),
5320 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5322 VmaSuballocationItemSizeLess());
5323 size_t index = it - m_FreeSuballocationsBySize.data();
5324 for(; index < freeSuballocCount; ++index)
5329 bufferImageGranularity,
5333 m_FreeSuballocationsBySize[index],
5335 &pAllocationRequest->offset,
5336 &pAllocationRequest->itemsToMakeLostCount,
5337 &pAllocationRequest->sumFreeSize,
5338 &pAllocationRequest->sumItemSize))
5340 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
for(size_t index = freeSuballocCount; index--; )
5353 bufferImageGranularity,
5357 m_FreeSuballocationsBySize[index],
5359 &pAllocationRequest->offset,
5360 &pAllocationRequest->itemsToMakeLostCount,
5361 &pAllocationRequest->sumFreeSize,
5362 &pAllocationRequest->sumItemSize))
5364 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5371 if(canMakeOtherLost)
5375 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5376 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5378 VmaAllocationRequest tmpAllocRequest = {};
5379 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5380 suballocIt != m_Suballocations.end();
5383 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5384 suballocIt->hAllocation->CanBecomeLost())
5389 bufferImageGranularity,
5395 &tmpAllocRequest.offset,
5396 &tmpAllocRequest.itemsToMakeLostCount,
5397 &tmpAllocRequest.sumFreeSize,
5398 &tmpAllocRequest.sumItemSize))
5400 tmpAllocRequest.item = suballocIt;
5402 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5404 *pAllocationRequest = tmpAllocRequest;
5410 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
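// Illustrative sketch (not part of the library): the lookup above relies on
// m_FreeSuballocationsBySize being kept sorted by size, so a binary search in the spirit of
// VmaBinaryFindFirstNotLess finds the first free range that is large enough. A stand-alone
// version of that idea, using std::lower_bound (from <algorithm>, already included above)
// over a hypothetical plain array of ranges:
struct FreeRangeSketch { VkDeviceSize offset; VkDeviceSize size; };

// Returns the index of the smallest free range with size >= requiredSize, or SIZE_MAX if
// none exists. 'pRanges' must be sorted by ascending size, like m_FreeSuballocationsBySize.
static size_t FindBestFitSketch(const FreeRangeSketch* pRanges, size_t rangeCount, VkDeviceSize requiredSize)
{
    const FreeRangeSketch* const it = std::lower_bound(pRanges, pRanges + rangeCount, requiredSize,
        [](const FreeRangeSketch& range, VkDeviceSize sizeNeeded) { return range.size < sizeNeeded; });
    return (it != pRanges + rangeCount) ? (size_t)(it - pRanges) : SIZE_MAX;
}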
5419 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5420 uint32_t currentFrameIndex,
5421 uint32_t frameInUseCount,
5422 VmaAllocationRequest* pAllocationRequest)
5424 while(pAllocationRequest->itemsToMakeLostCount > 0)
5426 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5428 ++pAllocationRequest->item;
5430 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5431 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5432 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5433 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5435 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5436 --pAllocationRequest->itemsToMakeLostCount;
5444 VMA_HEAVY_ASSERT(Validate());
5445 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5446 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5451 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5453 uint32_t lostAllocationCount = 0;
5454 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5455 it != m_Suballocations.end();
5458 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5459 it->hAllocation->CanBecomeLost() &&
5460 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5462 it = FreeSuballocation(it);
5463 ++lostAllocationCount;
5466 return lostAllocationCount;
5469 void VmaBlockMetadata::Alloc(
5470 const VmaAllocationRequest& request,
5471 VmaSuballocationType type,
5472 VkDeviceSize allocSize,
5473 VmaAllocation hAllocation)
5475 VMA_ASSERT(request.item != m_Suballocations.end());
5476 VmaSuballocation& suballoc = *request.item;
5478 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5480 VMA_ASSERT(request.offset >= suballoc.offset);
5481 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5482 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5483 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
5487 UnregisterFreeSuballocation(request.item);
5489 suballoc.offset = request.offset;
5490 suballoc.size = allocSize;
5491 suballoc.type = type;
5492 suballoc.hAllocation = hAllocation;
5497 VmaSuballocation paddingSuballoc = {};
5498 paddingSuballoc.offset = request.offset + allocSize;
5499 paddingSuballoc.size = paddingEnd;
5500 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5501 VmaSuballocationList::iterator next = request.item;
5503 const VmaSuballocationList::iterator paddingEndItem =
5504 m_Suballocations.insert(next, paddingSuballoc);
5505 RegisterFreeSuballocation(paddingEndItem);
5511 VmaSuballocation paddingSuballoc = {};
5512 paddingSuballoc.offset = request.offset - paddingBegin;
5513 paddingSuballoc.size = paddingBegin;
5514 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5515 const VmaSuballocationList::iterator paddingBeginItem =
5516 m_Suballocations.insert(request.item, paddingSuballoc);
5517 RegisterFreeSuballocation(paddingBeginItem);
5521 m_FreeCount = m_FreeCount - 1;
5522 if(paddingBegin > 0)
5530 m_SumFreeSize -= allocSize;
void VmaBlockMetadata::Free(const VmaAllocation allocation)
5535 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5536 suballocItem != m_Suballocations.end();
5539 VmaSuballocation& suballoc = *suballocItem;
5540 if(suballoc.hAllocation == allocation)
5542 FreeSuballocation(suballocItem);
5543 VMA_HEAVY_ASSERT(Validate());
VMA_ASSERT(0 && "Not found!");
5550 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5552 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5553 suballocItem != m_Suballocations.end();
5556 VmaSuballocation& suballoc = *suballocItem;
5557 if(suballoc.offset == offset)
5559 FreeSuballocation(suballocItem);
VMA_ASSERT(0 && "Not found!");
bool VmaBlockMetadata::ValidateFreeSuballocationList() const
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5571 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5573 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5578 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5583 if(it->size < lastSize)
5589 lastSize = it->size;
5594 bool VmaBlockMetadata::CheckAllocation(
5595 uint32_t currentFrameIndex,
5596 uint32_t frameInUseCount,
5597 VkDeviceSize bufferImageGranularity,
5598 VkDeviceSize allocSize,
5599 VkDeviceSize allocAlignment,
5600 VmaSuballocationType allocType,
5601 VmaSuballocationList::const_iterator suballocItem,
5602 bool canMakeOtherLost,
5603 VkDeviceSize* pOffset,
5604 size_t* itemsToMakeLostCount,
5605 VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
    VMA_ASSERT(allocSize > 0);
5609 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5610 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5611 VMA_ASSERT(pOffset != VMA_NULL);
5613 *itemsToMakeLostCount = 0;
5617 if(canMakeOtherLost)
5619 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5621 *pSumFreeSize = suballocItem->size;
5625 if(suballocItem->hAllocation->CanBecomeLost() &&
5626 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5628 ++*itemsToMakeLostCount;
5629 *pSumItemSize = suballocItem->size;
5638 if(m_Size - suballocItem->offset < allocSize)
5644 *pOffset = suballocItem->offset;
5647 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5649 *pOffset += VMA_DEBUG_MARGIN;
5653 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5654 *pOffset = VmaAlignUp(*pOffset, alignment);
5658 if(bufferImageGranularity > 1)
bool bufferImageGranularityConflict = false;
5661 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5662 while(prevSuballocItem != m_Suballocations.cbegin())
5665 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5666 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5668 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
bufferImageGranularityConflict = true;
5678 if(bufferImageGranularityConflict)
5680 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5686 if(*pOffset >= suballocItem->offset + suballocItem->size)
5692 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5695 VmaSuballocationList::const_iterator next = suballocItem;
5697 const VkDeviceSize requiredEndMargin =
5698 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5700 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5702 if(suballocItem->offset + totalSize > m_Size)
5709 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5710 if(totalSize > suballocItem->size)
5712 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5713 while(remainingSize > 0)
5716 if(lastSuballocItem == m_Suballocations.cend())
5720 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5722 *pSumFreeSize += lastSuballocItem->size;
5726 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5727 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5728 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5730 ++*itemsToMakeLostCount;
5731 *pSumItemSize += lastSuballocItem->size;
5738 remainingSize = (lastSuballocItem->size < remainingSize) ?
5739 remainingSize - lastSuballocItem->size : 0;
5745 if(bufferImageGranularity > 1)
5747 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5749 while(nextSuballocItem != m_Suballocations.cend())
5751 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5752 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5754 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5756 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5757 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5758 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5760 ++*itemsToMakeLostCount;
5779 const VmaSuballocation& suballoc = *suballocItem;
5780 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5782 *pSumFreeSize = suballoc.size;
5785 if(suballoc.size < allocSize)
5791 *pOffset = suballoc.offset;
5794 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5796 *pOffset += VMA_DEBUG_MARGIN;
5800 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5801 *pOffset = VmaAlignUp(*pOffset, alignment);
5805 if(bufferImageGranularity > 1)
bool bufferImageGranularityConflict = false;
5808 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5809 while(prevSuballocItem != m_Suballocations.cbegin())
5812 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5813 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5815 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
bufferImageGranularityConflict = true;
5825 if(bufferImageGranularityConflict)
5827 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5832 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5835 VmaSuballocationList::const_iterator next = suballocItem;
5837 const VkDeviceSize requiredEndMargin =
5838 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5841 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
5848 if(bufferImageGranularity > 1)
5850 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5852 while(nextSuballocItem != m_Suballocations.cend())
5854 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5855 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5857 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
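// Illustrative sketch (not part of the library): the bufferImageGranularity checks in
// CheckAllocation() above boil down to "do two resources end/begin on the same granularity
// page?". A stand-alone version of that predicate, assuming pageSize is a power of two
// (the same assumption the implementation makes):
static inline bool OnSameGranularityPageSketch(
    VkDeviceSize resourceAOffset, VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset, VkDeviceSize pageSize)
{
    const VkDeviceSize resourceAEnd       = resourceAOffset + resourceASize - 1;
    const VkDeviceSize resourceAEndPage   = resourceAEnd & ~(pageSize - 1);     // page holding A's last byte
    const VkDeviceSize resourceBStartPage = resourceBOffset & ~(pageSize - 1);  // page holding B's first byte
    return resourceAEndPage == resourceBStartPage;
}
// When the pages coincide and the neighboring suballocation is of a conflicting type
// (linear vs. optimal tiling), the code above bumps *pOffset up to the next multiple of
// bufferImageGranularity via VmaAlignUp(*pOffset, bufferImageGranularity).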
5876 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5878 VMA_ASSERT(item != m_Suballocations.end());
5879 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5881 VmaSuballocationList::iterator nextItem = item;
5883 VMA_ASSERT(nextItem != m_Suballocations.end());
5884 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5886 item->size += nextItem->size;
5888 m_Suballocations.erase(nextItem);
5891 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5894 VmaSuballocation& suballoc = *suballocItem;
5895 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5896 suballoc.hAllocation = VK_NULL_HANDLE;
5900 m_SumFreeSize += suballoc.size;
bool mergeWithNext = false;
bool mergeWithPrev = false;
5906 VmaSuballocationList::iterator nextItem = suballocItem;
5908 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5910 mergeWithNext =
true;
5913 VmaSuballocationList::iterator prevItem = suballocItem;
5914 if(suballocItem != m_Suballocations.begin())
5917 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5919 mergeWithPrev =
true;
5925 UnregisterFreeSuballocation(nextItem);
5926 MergeFreeWithNext(suballocItem);
5931 UnregisterFreeSuballocation(prevItem);
5932 MergeFreeWithNext(prevItem);
5933 RegisterFreeSuballocation(prevItem);
5938 RegisterFreeSuballocation(suballocItem);
5939 return suballocItem;
5943 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5945 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5946 VMA_ASSERT(item->size > 0);
5950 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5952 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5954 if(m_FreeSuballocationsBySize.empty())
5956 m_FreeSuballocationsBySize.push_back(item);
5960 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
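// Illustrative sketch (not part of the library): RegisterFreeSuballocation() above keeps
// m_FreeSuballocationsBySize sorted by size so the best-fit search stays logarithmic. The
// same idea on a hypothetical plain array of sizes, using std::lower_bound:
static void InsertSortedSketch(VkDeviceSize* pSizes, size_t& count, VkDeviceSize newSize)
{
    // Caller is assumed to guarantee capacity for one more element.
    // Find the first element not less than newSize and shift the tail up by one slot.
    VkDeviceSize* const pos = std::lower_bound(pSizes, pSizes + count, newSize);
    memmove(pos + 1, pos, (size_t)((pSizes + count) - pos) * sizeof(VkDeviceSize));
    *pos = newSize;
    ++count;
}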
5968 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5970 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5971 VMA_ASSERT(item->size > 0);
5975 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5977 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
5980 m_FreeSuballocationsBySize.data(),
5981 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5983 VmaSuballocationItemSizeLess());
for(size_t index = it - m_FreeSuballocationsBySize.data();
5985 index < m_FreeSuballocationsBySize.size();
5988 if(m_FreeSuballocationsBySize[index] == item)
5990 VmaVectorRemove(m_FreeSuballocationsBySize, index);
VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
VMA_ASSERT(0 && "Not found.");
6004 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
6006 m_pMappedData(VMA_NULL)
6010 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");

VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData)
6022 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6025 m_MapCount += count;
6026 VMA_ASSERT(m_pMappedData != VMA_NULL);
6027 if(ppData != VMA_NULL)
6029 *ppData = m_pMappedData;
6035 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6036 hAllocator->m_hDevice,
6042 if(result == VK_SUCCESS)
6044 if(ppData != VMA_NULL)
6046 *ppData = m_pMappedData;
6054 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
6061 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6062 if(m_MapCount >= count)
6064 m_MapCount -= count;
6067 m_pMappedData = VMA_NULL;
6068 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
6080 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
6081 m_MemoryTypeIndex(UINT32_MAX),
6082 m_hMemory(VK_NULL_HANDLE),
6083 m_Metadata(hAllocator)
6087 void VmaDeviceMemoryBlock::Init(
6088 uint32_t newMemoryTypeIndex,
6089 VkDeviceMemory newMemory,
6090 VkDeviceSize newSize)
6092 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6094 m_MemoryTypeIndex = newMemoryTypeIndex;
6095 m_hMemory = newMemory;
6097 m_Metadata.Init(newSize);
6100 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
6106 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6107 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6108 m_hMemory = VK_NULL_HANDLE;
bool VmaDeviceMemoryBlock::Validate() const
    if((m_hMemory == VK_NULL_HANDLE) ||
6114 (m_Metadata.GetSize() == 0))
6119 return m_Metadata.Validate();
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
6124 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
6127 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
6129 m_Mapping.Unmap(hAllocator, m_hMemory, count);
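// Illustrative sketch (not part of the library): the reference-counted Map()/Unmap() pair
// above is what backs the public vmaMapMemory()/vmaUnmapMemory() functions declared earlier
// in this header. Nested mapping of the same allocation is fine as long as every map is
// matched by an unmap; the memory is only unmapped when the count reaches zero.
static VkResult ExampleWriteToAllocation(VmaAllocator allocator, VmaAllocation allocation,
    const void* srcData, size_t srcSize)
{
    void* pData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &pData); // increments the map count
    if(res != VK_SUCCESS)
    {
        return res;
    }
    memcpy(pData, srcData, srcSize);
    vmaUnmapMemory(allocator, allocation); // decrements the map count
    return VK_SUCCESS;
}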
memset(&outInfo, 0, sizeof(outInfo));

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
6161 VmaPool_T::VmaPool_T(
6162 VmaAllocator hAllocator,
6166 createInfo.memoryTypeIndex,
6167 createInfo.blockSize,
6168 createInfo.minBlockCount,
6169 createInfo.maxBlockCount,
6171 createInfo.frameInUseCount,
6176 VmaPool_T::~VmaPool_T()
#if VMA_STATS_STRING_ENABLED
#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockVector::VmaBlockVector(
6185 VmaAllocator hAllocator,
6186 uint32_t memoryTypeIndex,
6187 VkDeviceSize preferredBlockSize,
6188 size_t minBlockCount,
6189 size_t maxBlockCount,
6190 VkDeviceSize bufferImageGranularity,
6191 uint32_t frameInUseCount,
6192 bool isCustomPool) :
6193 m_hAllocator(hAllocator),
6194 m_MemoryTypeIndex(memoryTypeIndex),
6195 m_PreferredBlockSize(preferredBlockSize),
6196 m_MinBlockCount(minBlockCount),
6197 m_MaxBlockCount(maxBlockCount),
6198 m_BufferImageGranularity(bufferImageGranularity),
6199 m_FrameInUseCount(frameInUseCount),
6200 m_IsCustomPool(isCustomPool),
6201 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6202 m_HasEmptyBlock(false),
6203 m_pDefragmentator(VMA_NULL)
6207 VmaBlockVector::~VmaBlockVector()
6209 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6211 for(
size_t i = m_Blocks.size(); i--; )
6213 m_Blocks[i]->Destroy(m_hAllocator);
6214 vma_delete(m_hAllocator, m_Blocks[i]);
6218 VkResult VmaBlockVector::CreateMinBlocks()
6220 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6222 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6223 if(res != VK_SUCCESS)
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
6239 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6241 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6243 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6245 VMA_HEAVY_ASSERT(pBlock->Validate());
6246 pBlock->m_Metadata.AddPoolStats(*pStats);
6250 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
6252 VkResult VmaBlockVector::Allocate(
6253 VmaPool hCurrentPool,
6254 uint32_t currentFrameIndex,
6255 const VkMemoryRequirements& vkMemReq,
6257 VmaSuballocationType suballocType,
6258 VmaAllocation* pAllocation)
6263 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
6270 VMA_ASSERT(pCurrBlock);
6271 VmaAllocationRequest currRequest = {};
6272 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6275 m_BufferImageGranularity,
6283 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
6287 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6288 if(res != VK_SUCCESS)
6295 if(pCurrBlock->m_Metadata.IsEmpty())
6297 m_HasEmptyBlock =
false;
6300 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6301 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6302 (*pAllocation)->InitBlockAllocation(
6311 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
(*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6318 const bool canCreateNewBlock =
6320 (m_Blocks.size() < m_MaxBlockCount);
6323 if(canCreateNewBlock)
6326 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6327 uint32_t newBlockSizeShift = 0;
6328 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
if(m_IsCustomPool == false)
6335 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6336 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6338 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6339 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6341 newBlockSize = smallerNewBlockSize;
6342 ++newBlockSizeShift;
6351 size_t newBlockIndex = 0;
6352 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
6354 if(m_IsCustomPool ==
false)
6356 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6358 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6359 if(smallerNewBlockSize >= vkMemReq.size)
6361 newBlockSize = smallerNewBlockSize;
6362 ++newBlockSizeShift;
6363 res = CreateBlock(newBlockSize, &newBlockIndex);
6372 if(res == VK_SUCCESS)
6374 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6375 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6379 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6380 if(res != VK_SUCCESS)
6387 VmaAllocationRequest allocRequest;
6388 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6389 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6390 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6391 (*pAllocation)->InitBlockAllocation(
6394 allocRequest.offset,
6400 VMA_HEAVY_ASSERT(pBlock->Validate());
VMA_DEBUG_LOG(" Created new allocation Size=%llu", allocInfo.allocationSize);
(*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6410 if(canMakeOtherLost)
6412 uint32_t tryIndex = 0;
6413 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6415 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6416 VmaAllocationRequest bestRequest = {};
6417 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6421 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6423 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6424 VMA_ASSERT(pCurrBlock);
6425 VmaAllocationRequest currRequest = {};
6426 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6429 m_BufferImageGranularity,
6436 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6437 if(pBestRequestBlock == VMA_NULL ||
6438 currRequestCost < bestRequestCost)
6440 pBestRequestBlock = pCurrBlock;
6441 bestRequest = currRequest;
6442 bestRequestCost = currRequestCost;
6444 if(bestRequestCost == 0)
6452 if(pBestRequestBlock != VMA_NULL)
6456 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6457 if(res != VK_SUCCESS)
6463 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6469 if(pBestRequestBlock->m_Metadata.IsEmpty())
6471 m_HasEmptyBlock =
false;
6474 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6475 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6476 (*pAllocation)->InitBlockAllocation(
6485 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
VMA_DEBUG_LOG(" Returned from existing allocation #%u", (uint32_t)blockIndex);
(*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6501 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6503 return VK_ERROR_TOO_MANY_OBJECTS;
6507 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
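// Illustrative sketch (not part of the library): the "1/8, 1/4, 1/2" sizing behavior coded
// above. For default (non-custom) pools, Allocate() may halve the preferred block size up to
// NEW_BLOCK_SIZE_SHIFT_MAX = 3 times, both when choosing an initial size and when
// vkAllocateMemory fails. A stand-alone rendition of that sizing decision:
static VkDeviceSize ChooseNewBlockSizeSketch(
    VkDeviceSize preferredBlockSize,
    VkDeviceSize maxExistingBlockSize,
    VkDeviceSize allocationSize)
{
    const uint32_t kMaxShift = 3; // mirrors NEW_BLOCK_SIZE_SHIFT_MAX above
    VkDeviceSize newBlockSize = preferredBlockSize;
    for(uint32_t shift = 0; shift < kMaxShift; ++shift)
    {
        const VkDeviceSize smaller = newBlockSize / 2;
        // Only shrink while the candidate is still larger than any existing block and still
        // comfortably fits the requested allocation, as in the loop above.
        if(smaller > maxExistingBlockSize && smaller >= allocationSize * 2)
        {
            newBlockSize = smaller;
        }
        else
        {
            break;
        }
    }
    return newBlockSize;
}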
6510 void VmaBlockVector::Free(
6511 VmaAllocation hAllocation)
6513 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6517 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6519 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6521 if(hAllocation->IsPersistentMap())
6523 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6526 pBlock->m_Metadata.Free(hAllocation);
6527 VMA_HEAVY_ASSERT(pBlock->Validate());
6529 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6532 if(pBlock->m_Metadata.IsEmpty())
6535 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6537 pBlockToDelete = pBlock;
6543 m_HasEmptyBlock =
true;
6548 else if(m_HasEmptyBlock)
6550 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6551 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6553 pBlockToDelete = pLastBlock;
6554 m_Blocks.pop_back();
6555 m_HasEmptyBlock =
false;
6559 IncrementallySortBlocks();
6564 if(pBlockToDelete != VMA_NULL)
6566 VMA_DEBUG_LOG(
" Deleted empty allocation");
6567 pBlockToDelete->Destroy(m_hAllocator);
6568 vma_delete(m_hAllocator, pBlockToDelete);
size_t VmaBlockVector::CalcMaxBlockSize() const
    for(size_t i = m_Blocks.size(); i--; )
6577 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6578 if(result >= m_PreferredBlockSize)
6586 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6588 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6590 if(m_Blocks[blockIndex] == pBlock)
6592 VmaVectorRemove(m_Blocks, blockIndex);
6599 void VmaBlockVector::IncrementallySortBlocks()
6602 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6604 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6606 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
6614 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6615 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6616 allocInfo.allocationSize = blockSize;
6617 VkDeviceMemory mem = VK_NULL_HANDLE;
6618 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6627 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6631 allocInfo.allocationSize);
6633 m_Blocks.push_back(pBlock);
6634 if(pNewBlockIndex != VMA_NULL)
6636 *pNewBlockIndex = m_Blocks.size() - 1;
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
    VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
    json.WriteString("MemoryTypeIndex");
    json.WriteNumber(m_MemoryTypeIndex);
    json.WriteString("BlockSize");
    json.WriteNumber(m_PreferredBlockSize);
    json.WriteString("BlockCount");
    json.BeginObject(true);
    if(m_MinBlockCount > 0)
        json.WriteString("Min");
        json.WriteNumber((uint64_t)m_MinBlockCount);
    if(m_MaxBlockCount < SIZE_MAX)
        json.WriteString("Max");
        json.WriteNumber((uint64_t)m_MaxBlockCount);
    json.WriteString("Cur");
    json.WriteNumber((uint64_t)m_Blocks.size());
    if(m_FrameInUseCount > 0)
        json.WriteString("FrameInUseCount");
        json.WriteNumber(m_FrameInUseCount);
    json.WriteString("PreferredBlockSize");
    json.WriteNumber(m_PreferredBlockSize);
    json.WriteString("Blocks");
    for(size_t i = 0; i < m_Blocks.size(); ++i)
        m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
#endif // #if VMA_STATS_STRING_ENABLED

VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6700 VmaAllocator hAllocator,
6701 uint32_t currentFrameIndex)
6703 if(m_pDefragmentator == VMA_NULL)
6705 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6711 return m_pDefragmentator;
6714 VkResult VmaBlockVector::Defragment(
6716 VkDeviceSize& maxBytesToMove,
6717 uint32_t& maxAllocationsToMove)
6719 if(m_pDefragmentator == VMA_NULL)
6724 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6727 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6730 if(pDefragmentationStats != VMA_NULL)
const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6736 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6737 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
6743 m_HasEmptyBlock =
false;
6744 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6746 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6747 if(pBlock->m_Metadata.IsEmpty())
6749 if(m_Blocks.size() > m_MinBlockCount)
6751 if(pDefragmentationStats != VMA_NULL)
6754 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6757 VmaVectorRemove(m_Blocks, blockIndex);
6758 pBlock->Destroy(m_hAllocator);
6759 vma_delete(m_hAllocator, pBlock);
6763 m_HasEmptyBlock =
true;
6771 void VmaBlockVector::DestroyDefragmentator()
6773 if(m_pDefragmentator != VMA_NULL)
6775 vma_delete(m_hAllocator, m_pDefragmentator);
6776 m_pDefragmentator = VMA_NULL;
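// Illustrative sketch (not part of the library): the defragmentator above is exposed through
// the vmaDefragment() entry point declared earlier in this header. The VmaDefragmentationInfo
// field names follow that interface; the allocation array is assumed to have been gathered by
// the application from host-visible allocations that are not currently in use by the GPU.
static VkResult ExampleDefragmentSketch(
    VmaAllocator allocator,
    VmaAllocation* pAllocations, size_t allocationCount,
    VkBool32* pAllocationsChanged)
{
    VmaDefragmentationInfo defragInfo = {};
    defragInfo.maxBytesToMove = 64ull * 1024 * 1024; // per-call budget, arbitrary example value
    defragInfo.maxAllocationsToMove = 128;

    VmaDefragmentationStats stats = {};
    VkResult res = vmaDefragment(allocator, pAllocations, allocationCount,
        pAllocationsChanged, &defragInfo, &stats);

    // Allocations whose pAllocationsChanged entry is VK_TRUE were moved: buffers/images bound
    // to them must be destroyed, recreated and rebound by the caller afterwards.
    return res;
}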
6780 void VmaBlockVector::MakePoolAllocationsLost(
6781 uint32_t currentFrameIndex,
6782 size_t* pLostAllocationCount)
6784 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6785 size_t lostAllocationCount = 0;
6786 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6788 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6790 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6792 if(pLostAllocationCount != VMA_NULL)
6794 *pLostAllocationCount = lostAllocationCount;
void VmaBlockVector::AddStats(VmaStats* pStats)
6800 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6801 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6803 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6805 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6807 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6809 VMA_HEAVY_ASSERT(pBlock->Validate());
6811 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
VmaAddStatInfo(pStats->total, allocationStatInfo);
VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
6821 VmaDefragmentator::VmaDefragmentator(
6822 VmaAllocator hAllocator,
6823 VmaBlockVector* pBlockVector,
6824 uint32_t currentFrameIndex) :
6825 m_hAllocator(hAllocator),
6826 m_pBlockVector(pBlockVector),
6827 m_CurrentFrameIndex(currentFrameIndex),
6829 m_AllocationsMoved(0),
6830 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6831 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6835 VmaDefragmentator::~VmaDefragmentator()
6837 for(
size_t i = m_Blocks.size(); i--; )
6839 vma_delete(m_hAllocator, m_Blocks[i]);
6843 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6845 AllocationInfo allocInfo;
6846 allocInfo.m_hAllocation = hAlloc;
6847 allocInfo.m_pChanged = pChanged;
6848 m_Allocations.push_back(allocInfo);
VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator, void** ppMappedData)
6854 if(m_pMappedDataForDefragmentation)
6856 *ppMappedData = m_pMappedDataForDefragmentation;
6861 if(m_pBlock->m_Mapping.GetMappedData())
6863 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6868 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6869 *ppMappedData = m_pMappedDataForDefragmentation;
6873 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6875 if(m_pMappedDataForDefragmentation != VMA_NULL)
6877 m_pBlock->Unmap(hAllocator, 1);
6881 VkResult VmaDefragmentator::DefragmentRound(
6882 VkDeviceSize maxBytesToMove,
6883 uint32_t maxAllocationsToMove)
6885 if(m_Blocks.empty())
6890 size_t srcBlockIndex = m_Blocks.size() - 1;
6891 size_t srcAllocIndex = SIZE_MAX;
6897 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6899 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6902 if(srcBlockIndex == 0)
6909 srcAllocIndex = SIZE_MAX;
6914 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6918 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6919 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6921 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6922 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6923 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6924 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6927 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6929 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6930 VmaAllocationRequest dstAllocRequest;
6931 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6932 m_CurrentFrameIndex,
6933 m_pBlockVector->GetFrameInUseCount(),
6934 m_pBlockVector->GetBufferImageGranularity(),
6939 &dstAllocRequest) &&
6941 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6943 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6946 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6947 (m_BytesMoved + size > maxBytesToMove))
6949 return VK_INCOMPLETE;
6952 void* pDstMappedData = VMA_NULL;
6953 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6954 if(res != VK_SUCCESS)
6959 void* pSrcMappedData = VMA_NULL;
6960 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6961 if(res != VK_SUCCESS)
6968 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6969 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6970 static_cast<size_t>(size));
6972 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6973 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6975 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6977 if(allocInfo.m_pChanged != VMA_NULL)
6979 *allocInfo.m_pChanged = VK_TRUE;
6982 ++m_AllocationsMoved;
6983 m_BytesMoved += size;
6985 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6993 if(srcAllocIndex > 0)
6999 if(srcBlockIndex > 0)
7002 srcAllocIndex = SIZE_MAX;
7012 VkResult VmaDefragmentator::Defragment(
7013 VkDeviceSize maxBytesToMove,
7014 uint32_t maxAllocationsToMove)
7016 if(m_Allocations.empty())
7022 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7023 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7025 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7026 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7027 m_Blocks.push_back(pBlockInfo);
7031 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
7034 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7036 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7038 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7040 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7041 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7042 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7044 (*it)->m_Allocations.push_back(allocInfo);
7052 m_Allocations.clear();
7054 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7056 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7057 pBlockInfo->CalcHasNonMovableAllocations();
7058 pBlockInfo->SortAllocationsBySizeDescecnding();
7062 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
7065 VkResult result = VK_SUCCESS;
7066 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7068 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7072 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7074 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7080 bool VmaDefragmentator::MoveMakesSense(
7081 size_t dstBlockIndex, VkDeviceSize dstOffset,
7082 size_t srcBlockIndex, VkDeviceSize srcOffset)
7084 if(dstBlockIndex < srcBlockIndex)
7088 if(dstBlockIndex > srcBlockIndex)
7092 if(dstOffset < srcOffset)
7105 m_hDevice(pCreateInfo->device),
7106 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7107 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7108 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7109 m_PreferredLargeHeapBlockSize(0),
7110 m_PhysicalDevice(pCreateInfo->physicalDevice),
7111 m_CurrentFrameIndex(0),
7112 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
memset(&m_MemProps, 0, sizeof(m_MemProps));
memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
7123 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7125 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7136 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7137 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
7144 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7146 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7147 if(limit != VK_WHOLE_SIZE)
7149 m_HeapSizeLimit[heapIndex] = limit;
7150 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7152 m_MemProps.memoryHeaps[heapIndex].size = limit;
7158 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7160 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7162 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7168 GetBufferImageGranularity(),
7173 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7177 VmaAllocator_T::~VmaAllocator_T()
7179 VMA_ASSERT(m_Pools.empty());
for(size_t i = GetMemoryTypeCount(); i--; )
    vma_delete(this, m_pDedicatedAllocations[i]);
    vma_delete(this, m_pBlockVectors[i]);
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7192 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7193 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7194 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7195 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7196 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7197 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7198 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7199 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7200 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7201 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7202 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7203 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7204 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
7205 if(m_UseKhrDedicatedAllocation)
m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
    (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
    (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

if(pVulkanFunctions != VMA_NULL)
7219 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7220 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7221 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7222 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7223 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7224 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7225 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7226 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7227 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7228 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7229 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7230 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7231 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7232 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7233 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7234 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#undef VMA_COPY_IF_NOT_NULL

VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7242 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7243 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7244 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7245 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7246 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7247 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7248 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7249 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7250 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7251 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7252 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7253 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7254 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7255 if(m_UseKhrDedicatedAllocation)
7257 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7258 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
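// Illustrative sketch (not part of the library): when VMA_STATIC_VULKAN_FUNCTIONS is 0
// (for example with a function loader), the pointers validated above must be supplied by the
// application through VmaAllocatorCreateInfo::pVulkanFunctions. A minimal example, assuming
// statically linked Vulkan prototypes are still visible to the application code:
static VkResult ExampleCreateAllocatorWithFunctions(
    VkPhysicalDevice physicalDevice, VkDevice device, VmaAllocator* pAllocator)
{
    VmaVulkanFunctions funcs = {};
    funcs.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    funcs.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    funcs.vkAllocateMemory = &vkAllocateMemory;
    funcs.vkFreeMemory = &vkFreeMemory;
    funcs.vkMapMemory = &vkMapMemory;
    funcs.vkUnmapMemory = &vkUnmapMemory;
    funcs.vkBindBufferMemory = &vkBindBufferMemory;
    funcs.vkBindImageMemory = &vkBindImageMemory;
    funcs.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    funcs.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    funcs.vkCreateBuffer = &vkCreateBuffer;
    funcs.vkDestroyBuffer = &vkDestroyBuffer;
    funcs.vkCreateImage = &vkCreateImage;
    funcs.vkDestroyImage = &vkDestroyImage;

    VmaAllocatorCreateInfo createInfo = {};
    createInfo.physicalDevice = physicalDevice;
    createInfo.device = device;
    createInfo.pVulkanFunctions = &funcs;
    return vmaCreateAllocator(&createInfo, pAllocator);
}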
7262 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7264 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7265 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7266 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7267 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
7270 VkResult VmaAllocator_T::AllocateMemoryOfType(
7271 const VkMemoryRequirements& vkMemReq,
7272 bool dedicatedAllocation,
7273 VkBuffer dedicatedBuffer,
7274 VkImage dedicatedImage,
7276 uint32_t memTypeIndex,
7277 VmaSuballocationType suballocType,
7278 VmaAllocation* pAllocation)
7280 VMA_ASSERT(pAllocation != VMA_NULL);
VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
7287 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7292 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7293 VMA_ASSERT(blockVector);
7295 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7296 bool preferDedicatedMemory =
7297 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7298 dedicatedAllocation ||
7300 vkMemReq.size > preferredBlockSize / 2;
7302 if(preferDedicatedMemory &&
7304 finalCreateInfo.
pool == VK_NULL_HANDLE)
7313 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7317 return AllocateDedicatedMemory(
7331 VkResult res = blockVector->Allocate(
7333 m_CurrentFrameIndex.load(),
7338 if(res == VK_SUCCESS)
7346 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7350 res = AllocateDedicatedMemory(
7356 finalCreateInfo.pUserData,
7360 if(res == VK_SUCCESS)
VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
7376 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7378 VmaSuballocationType suballocType,
7379 uint32_t memTypeIndex,
7381 bool isUserDataString,
7383 VkBuffer dedicatedBuffer,
7384 VkImage dedicatedImage,
7385 VmaAllocation* pAllocation)
7387 VMA_ASSERT(pAllocation);
7389 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7390 allocInfo.memoryTypeIndex = memTypeIndex;
7391 allocInfo.allocationSize = size;
7393 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7394 if(m_UseKhrDedicatedAllocation)
7396 if(dedicatedBuffer != VK_NULL_HANDLE)
7398 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7399 dedicatedAllocInfo.buffer = dedicatedBuffer;
7400 allocInfo.pNext = &dedicatedAllocInfo;
7402 else if(dedicatedImage != VK_NULL_HANDLE)
7404 dedicatedAllocInfo.image = dedicatedImage;
7405 allocInfo.pNext = &dedicatedAllocInfo;
7410 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7411 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
7418 void* pMappedData = VMA_NULL;
7421 res = (*m_VulkanFunctions.vkMapMemory)(
VMA_DEBUG_LOG(" vkMapMemory FAILED");
7431 FreeVulkanMemory(memTypeIndex, size, hMemory);
*pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
(*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
(*pAllocation)->SetUserData(this, pUserData);
7442 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7443 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7444 VMA_ASSERT(pDedicatedAllocations);
7445 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7453 void VmaAllocator_T::GetBufferMemoryRequirements(
7455 VkMemoryRequirements& memReq,
7456 bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
    if(m_UseKhrDedicatedAllocation)
7461 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7462 memReqInfo.buffer = hBuffer;
7464 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7466 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7467 memReq2.pNext = &memDedicatedReq;
7469 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7471 memReq = memReq2.memoryRequirements;
7472 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7473 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7477 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
requiresDedicatedAllocation = false;
prefersDedicatedAllocation = false;
7483 void VmaAllocator_T::GetImageMemoryRequirements(
7485 VkMemoryRequirements& memReq,
7486 bool& requiresDedicatedAllocation,
7487 bool& prefersDedicatedAllocation) const
7489 if(m_UseKhrDedicatedAllocation)
7491 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7492 memReqInfo.image = hImage;
7494 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7496 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7497 memReq2.pNext = &memDedicatedReq;
7499 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7501 memReq = memReq2.memoryRequirements;
7502 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7503 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7507 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7508 requiresDedicatedAllocation = false;
7509 prefersDedicatedAllocation = false;
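/*
Illustrative usage sketch (not part of the original source). The two helpers above
wrap the VK_KHR_dedicated_allocation queries; from the public API side, a dedicated
memory block can also be requested explicitly with
VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT. `allocator` is assumed to be an existing
VmaAllocator; buffer size and usage are arbitrary example values.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 64ull * 1024 * 1024;
    bufCreateInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult result = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buf, &alloc, nullptr); // VmaAllocationInfo* is optional
*/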
7513 VkResult VmaAllocator_T::AllocateMemory(
7514 const VkMemoryRequirements& vkMemReq,
7515 bool requiresDedicatedAllocation,
7516 bool prefersDedicatedAllocation,
7517 VkBuffer dedicatedBuffer,
7518 VkImage dedicatedImage,
7520 VmaSuballocationType suballocType,
7521 VmaAllocation* pAllocation)
7526 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7527 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7532 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7533 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7535 if(requiresDedicatedAllocation)
7539 VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7540 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7542 if(createInfo.pool != VK_NULL_HANDLE)
7544 VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
7545 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7548 if((createInfo.pool != VK_NULL_HANDLE) &&
7551 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7552 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7555 if(createInfo.pool != VK_NULL_HANDLE)
7557 return createInfo.pool->m_BlockVector.Allocate(
7559 m_CurrentFrameIndex.load(),
7568 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7569 uint32_t memTypeIndex = UINT32_MAX;
7571 if(res == VK_SUCCESS)
7573 res = AllocateMemoryOfType(
7575 requiresDedicatedAllocation || prefersDedicatedAllocation,
7583 if(res == VK_SUCCESS)
7593 memoryTypeBits &= ~(1u << memTypeIndex);
7596 if(res == VK_SUCCESS)
7598 res = AllocateMemoryOfType(
7600 requiresDedicatedAllocation || prefersDedicatedAllocation,
7608 if(res == VK_SUCCESS)
7618 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
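/*
Illustrative sketch (not part of the original source) of an allocation request that
passes the validation above. Note the combinations rejected by the asserts:
DEDICATED_MEMORY + NEVER_ALLOCATE, MAPPED + CAN_BECOME_LOST, and a custom pool
combined with DEDICATED_MEMORY or with a resource that requires a dedicated
allocation. `allocator` and `memReq` (filled from vkGet*MemoryRequirements) are
assumed to exist.

    VmaAllocationCreateInfo createInfo = {};
    createInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    createInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; // valid: not combined with CAN_BECOME_LOST

    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    VkResult result = vmaAllocateMemory(allocator, &memReq, &createInfo,
        &allocation, &allocInfo);
*/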
7629 void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
7631 VMA_ASSERT(allocation);
7633 if(allocation->CanBecomeLost() == false ||
7634 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7636 switch(allocation->GetType())
7638 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7640 VmaBlockVector* pBlockVector = VMA_NULL;
7641 VmaPool hPool = allocation->GetPool();
7642 if(hPool != VK_NULL_HANDLE)
7644 pBlockVector = &hPool->m_BlockVector;
7648 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7649 pBlockVector = m_pBlockVectors[memTypeIndex];
7651 pBlockVector->Free(allocation);
7654 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7655 FreeDedicatedMemory(allocation);
7662 allocation->SetUserData(this, VMA_NULL);
7663 vma_delete(this, allocation);
7666 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
7669 InitStatInfo(pStats->total);
7670 for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7672 for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7676 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7678 VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
7679 VMA_ASSERT(pBlockVector);
7680 pBlockVector->AddStats(pStats);
7685 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7686 for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7688 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
7693 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7695 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7696 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7697 AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7698 VMA_ASSERT(pDedicatedAllocVector);
7699 for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7702 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7703 VmaAddStatInfo(pStats->total, allocationStatInfo);
7704 VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
7705 VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
7710 VmaPostprocessCalcStatInfo(pStats->total);
7711 for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
7712 VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
7713 for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
7714 VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
7717 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
7719 VkResult VmaAllocator_T::Defragment(
7720 VmaAllocation* pAllocations,
7721 size_t allocationCount,
7722 VkBool32* pAllocationsChanged,
7726 if(pAllocationsChanged != VMA_NULL)
7728 memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
7730 if(pDefragmentationStats != VMA_NULL)
7732 memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
7735 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7737 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7739 const size_t poolCount = m_Pools.size();
7742 for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7744 VmaAllocation hAlloc = pAllocations[allocIndex];
7746 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7748 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7750 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7752 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7754 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7756 const VmaPool hAllocPool = hAlloc->GetPool();
7758 if(hAllocPool != VK_NULL_HANDLE)
7760 pAllocBlockVector = &hAllocPool->GetBlockVector();
7765 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7768 VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);
7770 VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
7771 &pAllocationsChanged[allocIndex] : VMA_NULL;
7772 pDefragmentator->AddAllocation(hAlloc, pChanged);
7776 VkResult result = VK_SUCCESS;
7780 VkDeviceSize maxBytesToMove = SIZE_MAX;
7781 uint32_t maxAllocationsToMove = UINT32_MAX;
7782 if(pDefragmentationInfo != VMA_NULL)
7789 for(uint32_t memTypeIndex = 0;
7790 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7794 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7796 result = m_pBlockVectors[memTypeIndex]->Defragment(
7797 pDefragmentationStats,
7799 maxAllocationsToMove);
7804 for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7806 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7807 pDefragmentationStats,
7809 maxAllocationsToMove);
7815 for(size_t poolIndex = poolCount; poolIndex--; )
7817 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7821 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7823 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7825 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
7832 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
7834 if(hAllocation->CanBecomeLost())
7840 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7841 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7844 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7848 pAllocationInfo->offset = 0;
7849 pAllocationInfo->size = hAllocation->GetSize();
7851 pAllocationInfo->pUserData = hAllocation->GetUserData();
7854 else if(localLastUseFrameIndex == localCurrFrameIndex)
7856 pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
7857 pAllocationInfo->deviceMemory = hAllocation->GetMemory();
7858 pAllocationInfo->offset = hAllocation->GetOffset();
7859 pAllocationInfo->size = hAllocation->GetSize();
7861 pAllocationInfo->pUserData = hAllocation->GetUserData();
7866 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7868 localLastUseFrameIndex = localCurrFrameIndex;
7875 pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
7876 pAllocationInfo->deviceMemory = hAllocation->GetMemory();
7877 pAllocationInfo->offset = hAllocation->GetOffset();
7878 pAllocationInfo->size = hAllocation->GetSize();
7879 pAllocationInfo->pMappedData = hAllocation->GetMappedData();
7880 pAllocationInfo->pUserData = hAllocation->GetUserData();
7884 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
7887 if(hAllocation->CanBecomeLost())
7889 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7890 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7893 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7897 else if(localLastUseFrameIndex == localCurrFrameIndex)
7903 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7905 localLastUseFrameIndex = localCurrFrameIndex;
7916 VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7918 VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);
7931 *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
7933 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7934 if(res != VK_SUCCESS)
7936 vma_delete(this, *pPool);
7943 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7944 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7950 void VmaAllocator_T::DestroyPool(VmaPool pool)
7954 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7955 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7956 VMA_ASSERT(success && "Pool not found in Allocator.");
7959 vma_delete(this, pool);
7962 void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
7964 pool->m_BlockVector.GetPoolStats(pPoolStats);
7967 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7969 m_CurrentFrameIndex.store(frameIndex);
7972 void VmaAllocator_T::MakePoolAllocationsLost(
7974 size_t* pLostAllocationCount)
7976 hPool->m_BlockVector.MakePoolAllocationsLost(
7977 m_CurrentFrameIndex.load(),
7978 pLostAllocationCount);
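/*
Illustrative sketch (not part of the original source) of the frame-index /
lost-allocation workflow that SetCurrentFrameIndex and MakePoolAllocationsLost
support. `allocator`, `pool`, `allocation` and the per-frame counter `frameIndex`
are assumed to exist.

    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // An allocation created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT must be
    // checked before use; vmaTouchAllocation also marks it as used in this frame.
    if(vmaTouchAllocation(allocator, allocation) == VK_FALSE)
    {
        // Lost: destroy the old resource and recreate it.
    }

    // Optionally reclaim allocations from a custom pool that were not used recently.
    size_t lostCount = 0;
    vmaMakePoolAllocationsLost(allocator, pool, &lostCount);
*/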
7981 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7983 *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
7984 (*pAllocation)->InitLost();
7987 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7989 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7992 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7994 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7995 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7997 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7998 if(res == VK_SUCCESS)
8000 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8005 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8010 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8013 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
8015 (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8021 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8023 if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
8025 (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
8028 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8030 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8031 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8033 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8034 m_HeapSizeLimit[heapIndex] += size;
8038 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
8040 if(hAllocation->CanBecomeLost())
8042 return VK_ERROR_MEMORY_MAP_FAILED;
8045 switch(hAllocation->GetType())
8047 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8049 VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
8050 char *pBytes = VMA_NULL;
8051 VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
8052 if(res == VK_SUCCESS)
8054 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8055 hAllocation->BlockAllocMap();
8059 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8060 return hAllocation->DedicatedAllocMap(this, ppData);
8063 return VK_ERROR_MEMORY_MAP_FAILED;
8067 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
8069 switch(hAllocation->GetType())
8071 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8073 VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
8074 hAllocation->BlockAllocUnmap();
8075 pBlock->Unmap(this, 1);
8078 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8079 hAllocation->DedicatedAllocUnmap(this);
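/*
Illustrative sketch (not part of the original source): mapping a host-visible
allocation through the Map/Unmap path implemented above, copying data into it,
then unmapping. `allocator`, `allocation`, `srcData` and `dataSize` are assumed
to exist.

    void* pData = nullptr;
    if(vmaMapMemory(allocator, allocation, &pData) == VK_SUCCESS)
    {
        memcpy(pData, srcData, (size_t)dataSize);
        vmaUnmapMemory(allocator, allocation);
    }

    // Alternatively, create the allocation with VMA_ALLOCATION_CREATE_MAPPED_BIT and
    // read the persistent pointer from VmaAllocationInfo::pMappedData instead.
*/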
8086 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
8088 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8090 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8092 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8093 AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8094 VMA_ASSERT(pDedicatedAllocations);
8095 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8096 VMA_ASSERT(success);
8099 VkDeviceMemory hMemory = allocation->GetMemory();
8101 if(allocation->GetMappedData() != VMA_NULL)
8103 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8106 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8108 VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8111 #if VMA_STATS_STRING_ENABLED
8113 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8115 bool dedicatedAllocationsStarted = false;
8116 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8118 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8119 AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8120 VMA_ASSERT(pDedicatedAllocVector);
8121 if(pDedicatedAllocVector->empty() == false)
8123 if(dedicatedAllocationsStarted == false)
8125 dedicatedAllocationsStarted = true;
8126 json.WriteString("DedicatedAllocations");
8130 json.BeginString("Type ");
8131 json.ContinueString(memTypeIndex);
8136 for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8138 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8139 json.BeginObject(true);
8141 json.WriteString("Type");
8142 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8144 json.WriteString("Size");
8145 json.WriteNumber(hAlloc->GetSize());
8147 const void* pUserData = hAlloc->GetUserData();
8148 if(pUserData != VMA_NULL)
8150 json.WriteString("UserData");
8151 if(hAlloc->IsUserDataString())
8153 json.WriteString((const char*)pUserData);
8158 json.ContinueString_Pointer(pUserData);
8169 if(dedicatedAllocationsStarted)
8175 bool allocationsStarted = false;
8176 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8178 if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
8180 if(allocationsStarted == false)
8182 allocationsStarted = true;
8183 json.WriteString("DefaultPools");
8187 json.BeginString("Type ");
8188 json.ContinueString(memTypeIndex);
8191 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8194 if(allocationsStarted)
8201 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8202 const size_t poolCount = m_Pools.size();
8205 json.WriteString("Pools");
8207 for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8209 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8216 #endif // #if VMA_STATS_STRING_ENABLED
8218 static VkResult AllocateMemoryForImage(
8219 VmaAllocator allocator,
8222 VmaSuballocationType suballocType,
8223 VmaAllocation* pAllocation)
8225 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8227 VkMemoryRequirements vkMemReq = {};
8228 bool requiresDedicatedAllocation = false;
8229 bool prefersDedicatedAllocation = false;
8230 allocator->GetImageMemoryRequirements(image, vkMemReq,
8231 requiresDedicatedAllocation, prefersDedicatedAllocation);
8233 return allocator->AllocateMemory(
8235 requiresDedicatedAllocation,
8236 prefersDedicatedAllocation,
8239 *pAllocationCreateInfo,
8249 VmaAllocator* pAllocator)
8251 VMA_ASSERT(pCreateInfo && pAllocator);
8252 VMA_DEBUG_LOG("vmaCreateAllocator");
8258 VmaAllocator allocator)
8260 if(allocator != VK_NULL_HANDLE)
8262 VMA_DEBUG_LOG("vmaDestroyAllocator");
8263 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8264 vma_delete(&allocationCallbacks, allocator);
8269 VmaAllocator allocator,
8270 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8272 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8273 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8277 VmaAllocator allocator,
8278 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8280 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8281 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8285 VmaAllocator allocator,
8286 uint32_t memoryTypeIndex,
8287 VkMemoryPropertyFlags* pFlags)
8289 VMA_ASSERT(allocator && pFlags);
8290 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8291 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8295 VmaAllocator allocator,
8296 uint32_t frameIndex)
8298 VMA_ASSERT(allocator);
8299 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8301 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8303 allocator->SetCurrentFrameIndex(frameIndex);
8307 VmaAllocator allocator,
8310 VMA_ASSERT(allocator && pStats);
8311 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8312 allocator->CalculateStats(pStats);
8315 #if VMA_STATS_STRING_ENABLED
8318 VmaAllocator allocator,
8319 char** ppStatsString,
8320 VkBool32 detailedMap)
8322 VMA_ASSERT(allocator && ppStatsString);
8323 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8325 VmaStringBuilder sb(allocator);
8327 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8331 allocator->CalculateStats(&stats);
8333 json.WriteString("Total");
8334 VmaPrintStatInfo(json, stats.total);
8336 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8338 json.BeginString("Heap ");
8339 json.ContinueString(heapIndex);
8343 json.WriteString("Size");
8344 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8346 json.WriteString("Flags");
8347 json.BeginArray(true);
8348 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8350 json.WriteString("DEVICE_LOCAL");
8356 json.WriteString("Stats");
8357 VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
8360 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8362 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8364 json.BeginString("Type ");
8365 json.ContinueString(typeIndex);
8370 json.WriteString("Flags");
8371 json.BeginArray(true);
8372 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8373 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8375 json.WriteString("DEVICE_LOCAL");
8377 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8379 json.WriteString("HOST_VISIBLE");
8381 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8383 json.WriteString("HOST_COHERENT");
8385 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8387 json.WriteString("HOST_CACHED");
8389 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8391 json.WriteString("LAZILY_ALLOCATED");
8397 json.WriteString("Stats");
8398 VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
8407 if(detailedMap == VK_TRUE)
8409 allocator->PrintDetailedMap(json);
8415 const size_t len = sb.GetLength();
8416 char* const pChars = vma_new_array(allocator, char, len + 1);
8419 memcpy(pChars, sb.GetData(), len);
8422 *ppStatsString = pChars;
8419 memcpy(pChars, sb.GetData(), len);
8422 *ppStatsString = pChars;
8426 VmaAllocator allocator,
8429 if(pStatsString != VMA_NULL)
8431 VMA_ASSERT(allocator);
8432 size_t len = strlen(pStatsString);
8433 vma_delete_array(allocator, pStatsString, len + 1);
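/*
Illustrative sketch (not part of the original source): dumping allocator statistics
as a JSON string and releasing it with the matching free function. Requires
VMA_STATS_STRING_ENABLED; `allocator` is assumed to exist.

    char* statsString = nullptr;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map
    printf("%s\n", statsString);                           // or write to a file
    vmaFreeStatsString(allocator, statsString);
*/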
8437 #endif // #if VMA_STATS_STRING_ENABLED
8443 VmaAllocator allocator,
8444 uint32_t memoryTypeBits,
8446 uint32_t* pMemoryTypeIndex)
8448 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8449 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8450 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8457 uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
8461 switch(pAllocationCreateInfo->usage)
8466 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8469 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8472 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8473 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8476 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8477 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8483 *pMemoryTypeIndex = UINT32_MAX;
8484 uint32_t minCost = UINT32_MAX;
8485 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8486 memTypeIndex < allocator->GetMemoryTypeCount();
8487 ++memTypeIndex, memTypeBit <<= 1)
8490 if((memTypeBit & memoryTypeBits) != 0)
8492 const VkMemoryPropertyFlags currFlags =
8493 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8495 if((requiredFlags & ~currFlags) == 0)
8498 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8500 if(currCost < minCost)
8502 *pMemoryTypeIndex = memTypeIndex;
8512 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
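/*
Illustrative sketch (not part of the original source): using the cost-based search
above to pick a memory type, e.g. for a custom pool. `allocator` is assumed to
exist and `memReq` to come from vkGetBufferMemoryRequirements for a representative
buffer.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // HOST_VISIBLE + HOST_COHERENT required

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult result = vmaFindMemoryTypeIndex(allocator, memReq.memoryTypeBits,
        &allocCreateInfo, &memTypeIndex);
    // On success, memTypeIndex can be used e.g. as VmaPoolCreateInfo::memoryTypeIndex.
*/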
8516 VmaAllocator allocator,
8517 const VkBufferCreateInfo* pBufferCreateInfo,
8519 uint32_t* pMemoryTypeIndex)
8521 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8522 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8523 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8524 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8526 const VkDevice hDev = allocator->m_hDevice;
8527 VkBuffer hBuffer = VK_NULL_HANDLE;
8528 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8529 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8530 if(res == VK_SUCCESS)
8532 VkMemoryRequirements memReq = {};
8533 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8534 hDev, hBuffer, &memReq);
8538 memReq.memoryTypeBits,
8539 pAllocationCreateInfo,
8542 allocator->GetVulkanFunctions().vkDestroyBuffer(
8543 hDev, hBuffer, allocator->GetAllocationCallbacks());
8549 VmaAllocator allocator,
8550 const VkImageCreateInfo* pImageCreateInfo,
8552 uint32_t* pMemoryTypeIndex)
8554 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8555 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8556 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8557 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8559 const VkDevice hDev = allocator->m_hDevice;
8560 VkImage hImage = VK_NULL_HANDLE;
8561 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8562 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8563 if(res == VK_SUCCESS)
8565 VkMemoryRequirements memReq = {};
8566 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8567 hDev, hImage, &memReq);
8571 memReq.memoryTypeBits,
8572 pAllocationCreateInfo,
8575 allocator->GetVulkanFunctions().vkDestroyImage(
8576 hDev, hImage, allocator->GetAllocationCallbacks());
8582 VmaAllocator allocator,
8586 VMA_ASSERT(allocator && pCreateInfo && pPool);
8588 VMA_DEBUG_LOG("vmaCreatePool");
8590 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8592 return allocator->CreatePool(pCreateInfo, pPool);
8596 VmaAllocator allocator,
8599 VMA_ASSERT(allocator);
8601 if(pool == VK_NULL_HANDLE)
8606 VMA_DEBUG_LOG("vmaDestroyPool");
8608 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8610 allocator->DestroyPool(pool);
8614 VmaAllocator allocator,
8618 VMA_ASSERT(allocator && pool && pPoolStats);
8620 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8622 allocator->GetPoolStats(pool, pPoolStats);
8626 VmaAllocator allocator,
8628 size_t* pLostAllocationCount)
8630 VMA_ASSERT(allocator && pool);
8632 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8634 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8638 VmaAllocator allocator,
8639 const VkMemoryRequirements* pVkMemoryRequirements,
8641 VmaAllocation* pAllocation,
8644 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8646 VMA_DEBUG_LOG("vmaAllocateMemory");
8648 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8650 VkResult result = allocator->AllocateMemory(
8651 *pVkMemoryRequirements,
8657 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8660 if(pAllocationInfo && result == VK_SUCCESS)
8662 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8669 VmaAllocator allocator,
8672 VmaAllocation* pAllocation,
8675 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8677 VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
8679 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8681 VkMemoryRequirements vkMemReq = {};
8682 bool requiresDedicatedAllocation = false;
8683 bool prefersDedicatedAllocation = false;
8684 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8685 requiresDedicatedAllocation,
8686 prefersDedicatedAllocation);
8688 VkResult result = allocator->AllocateMemory(
8690 requiresDedicatedAllocation,
8691 prefersDedicatedAllocation,
8695 VMA_SUBALLOCATION_TYPE_BUFFER,
8698 if(pAllocationInfo && result == VK_SUCCESS)
8700 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8707 VmaAllocator allocator,
8710 VmaAllocation* pAllocation,
8713 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8715 VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
8717 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8719 VkResult result = AllocateMemoryForImage(
8723 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8726 if(pAllocationInfo && result == VK_SUCCESS)
8728 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8735 VmaAllocator allocator,
8736 VmaAllocation allocation)
8738 VMA_ASSERT(allocator && allocation);
8740 VMA_DEBUG_LOG("vmaFreeMemory");
8742 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8744 allocator->FreeMemory(allocation);
8748 VmaAllocator allocator,
8749 VmaAllocation allocation,
8752 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8754 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8756 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8760 VmaAllocator allocator,
8761 VmaAllocation allocation)
8763 VMA_ASSERT(allocator && allocation);
8765 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8767 return allocator->TouchAllocation(allocation);
8771 VmaAllocator allocator,
8772 VmaAllocation allocation,
8775 VMA_ASSERT(allocator && allocation);
8777 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8779 allocation->SetUserData(allocator, pUserData);
8783 VmaAllocator allocator,
8784 VmaAllocation* pAllocation)
8786 VMA_ASSERT(allocator && pAllocation);
8788 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8790 allocator->CreateLostAllocation(pAllocation);
8794 VmaAllocator allocator,
8795 VmaAllocation allocation,
8798 VMA_ASSERT(allocator && allocation && ppData);
8800 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8802 return allocator->Map(allocation, ppData);
8806 VmaAllocator allocator,
8807 VmaAllocation allocation)
8809 VMA_ASSERT(allocator && allocation);
8811 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8813 allocator->Unmap(allocation);
8817 VmaAllocator allocator,
8818 VmaAllocation* pAllocations,
8819 size_t allocationCount,
8820 VkBool32* pAllocationsChanged,
8824 VMA_ASSERT(allocator && pAllocations);
8826 VMA_DEBUG_LOG("vmaDefragment");
8828 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8830 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
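/*
Illustrative sketch (not part of the original source): defragmenting a set of
allocations. As implemented above, only block allocations in HOST_VISIBLE memory
that are not lost are considered. `allocator`, `allocations` and `allocCount` are
assumed to exist; the limits below mean "no limit".

    std::vector<VkBool32> changed(allocCount, VK_FALSE); // needs <vector>

    VmaDefragmentationInfo defragInfo = {};
    defragInfo.maxBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxAllocationsToMove = UINT32_MAX;

    VmaDefragmentationStats defragStats = {};
    VkResult result = vmaDefragment(allocator, allocations, allocCount,
        changed.data(), &defragInfo, &defragStats);

    // For every changed[i] == VK_TRUE, the buffer/image bound to allocations[i]
    // must be destroyed, recreated and bound again at the allocation's new place.
*/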
8834 VmaAllocator allocator,
8835 const VkBufferCreateInfo* pBufferCreateInfo,
8838 VmaAllocation* pAllocation,
8841 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8843 VMA_DEBUG_LOG("vmaCreateBuffer");
8845 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8847 *pBuffer = VK_NULL_HANDLE;
8848 *pAllocation = VK_NULL_HANDLE;
8851 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8852 allocator->m_hDevice,
8854 allocator->GetAllocationCallbacks(),
8859 VkMemoryRequirements vkMemReq = {};
8860 bool requiresDedicatedAllocation = false;
8861 bool prefersDedicatedAllocation = false;
8862 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8863 requiresDedicatedAllocation, prefersDedicatedAllocation);
8867 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8869 VMA_ASSERT(vkMemReq.alignment %
8870 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8872 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8874 VMA_ASSERT(vkMemReq.alignment %
8875 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8877 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8879 VMA_ASSERT(vkMemReq.alignment %
8880 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8884 res = allocator->AllocateMemory(
8886 requiresDedicatedAllocation,
8887 prefersDedicatedAllocation,
8890 *pAllocationCreateInfo,
8891 VMA_SUBALLOCATION_TYPE_BUFFER,
8896 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8897 allocator->m_hDevice,
8899 (*pAllocation)->GetMemory(),
8900 (*pAllocation)->GetOffset());
8904 if(pAllocationInfo != VMA_NULL)
8906 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8910 allocator->FreeMemory(*pAllocation);
8911 *pAllocation = VK_NULL_HANDLE;
8912 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8913 *pBuffer = VK_NULL_HANDLE;
8916 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8917 *pBuffer = VK_NULL_HANDLE;
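/*
Illustrative sketch (not part of the original source): creating a GPU-only vertex
buffer together with its memory in one call, then destroying both. `allocator` is
assumed to exist; `vertexDataSize` is a placeholder.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = vertexDataSize;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    VkResult result = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, &allocInfo);

    // ... use the buffer ...

    vmaDestroyBuffer(allocator, buffer, allocation);
*/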
8924 VmaAllocator allocator,
8926 VmaAllocation allocation)
8928 if(buffer != VK_NULL_HANDLE)
8930 VMA_ASSERT(allocator);
8932 VMA_DEBUG_LOG("vmaDestroyBuffer");
8934 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8936 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8938 allocator->FreeMemory(allocation);
8943 VmaAllocator allocator,
8944 const VkImageCreateInfo* pImageCreateInfo,
8947 VmaAllocation* pAllocation,
8950 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8952 VMA_DEBUG_LOG("vmaCreateImage");
8954 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8956 *pImage = VK_NULL_HANDLE;
8957 *pAllocation = VK_NULL_HANDLE;
8960 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8961 allocator->m_hDevice,
8963 allocator->GetAllocationCallbacks(),
8967 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8968 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8969 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8972 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8976 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8977 allocator->m_hDevice,
8979 (*pAllocation)->GetMemory(),
8980 (*pAllocation)->GetOffset());
8984 if(pAllocationInfo != VMA_NULL)
8986 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8990 allocator->FreeMemory(*pAllocation);
8991 *pAllocation = VK_NULL_HANDLE;
8992 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8993 *pImage = VK_NULL_HANDLE;
8996 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8997 *pImage = VK_NULL_HANDLE;
9004 VmaAllocator allocator,
9006 VmaAllocation allocation)
9008 if(image != VK_NULL_HANDLE)
9010 VMA_ASSERT(allocator);
9012 VMA_DEBUG_LOG("vmaDestroyImage");
9014 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9016 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9018 allocator->FreeMemory(allocation);
9022 #endif // #ifdef VMA_IMPLEMENTATION