23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 934 #include <vulkan/vulkan.h> 936 VK_DEFINE_HANDLE(VmaAllocator)
940 VmaAllocator allocator,
942 VkDeviceMemory memory,
946 VmaAllocator allocator,
948 VkDeviceMemory memory,
1097 VmaAllocator* pAllocator);
1101 VmaAllocator allocator);
1108 VmaAllocator allocator,
1109 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1116 VmaAllocator allocator,
1117 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1126 VmaAllocator allocator,
1127 uint32_t memoryTypeIndex,
1128 VkMemoryPropertyFlags* pFlags);
1139 VmaAllocator allocator,
1140 uint32_t frameIndex);
1170 VmaAllocator allocator,
1173 #define VMA_STATS_STRING_ENABLED 1 1175 #if VMA_STATS_STRING_ENABLED 1181 VmaAllocator allocator,
1182 char** ppStatsString,
1183 VkBool32 detailedMap);
1186 VmaAllocator allocator,
1187 char* pStatsString);
1189 #endif // #if VMA_STATS_STRING_ENABLED 1191 VK_DEFINE_HANDLE(VmaPool)
1374 VmaAllocator allocator,
1375 uint32_t memoryTypeBits,
1377 uint32_t* pMemoryTypeIndex);
1392 VmaAllocator allocator,
1393 const VkBufferCreateInfo* pBufferCreateInfo,
1395 uint32_t* pMemoryTypeIndex);
1410 VmaAllocator allocator,
1411 const VkImageCreateInfo* pImageCreateInfo,
1413 uint32_t* pMemoryTypeIndex);
1514 VmaAllocator allocator,
1521 VmaAllocator allocator,
1531 VmaAllocator allocator,
1542 VmaAllocator allocator,
1544 size_t* pLostAllocationCount);
1546 VK_DEFINE_HANDLE(VmaAllocation)
1602 VmaAllocator allocator,
1603 const VkMemoryRequirements* pVkMemoryRequirements,
1605 VmaAllocation* pAllocation,
1615 VmaAllocator allocator,
1618 VmaAllocation* pAllocation,
1623 VmaAllocator allocator,
1626 VmaAllocation* pAllocation,
1631 VmaAllocator allocator,
1632 VmaAllocation allocation);
1651 VmaAllocator allocator,
1652 VmaAllocation allocation,
1670 VmaAllocator allocator,
1671 VmaAllocation allocation);
1687 VmaAllocator allocator,
1688 VmaAllocation allocation,
1702 VmaAllocator allocator,
1703 VmaAllocation* pAllocation);
1740 VmaAllocator allocator,
1741 VmaAllocation allocation,
1749 VmaAllocator allocator,
1750 VmaAllocation allocation);
1861 VmaAllocator allocator,
1862 VmaAllocation* pAllocations,
1863 size_t allocationCount,
1864 VkBool32* pAllocationsChanged,
1895 VmaAllocator allocator,
1896 const VkBufferCreateInfo* pBufferCreateInfo,
1899 VmaAllocation* pAllocation,
1914 VmaAllocator allocator,
1916 VmaAllocation allocation);
1920 VmaAllocator allocator,
1921 const VkImageCreateInfo* pImageCreateInfo,
1924 VmaAllocation* pAllocation,
1939 VmaAllocator allocator,
1941 VmaAllocation allocation);
1947 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1950 #ifdef __INTELLISENSE__ 1951 #define VMA_IMPLEMENTATION 1954 #ifdef VMA_IMPLEMENTATION 1955 #undef VMA_IMPLEMENTATION 1977 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1978 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1990 #if VMA_USE_STL_CONTAINERS 1991 #define VMA_USE_STL_VECTOR 1 1992 #define VMA_USE_STL_UNORDERED_MAP 1 1993 #define VMA_USE_STL_LIST 1 1996 #if VMA_USE_STL_VECTOR 2000 #if VMA_USE_STL_UNORDERED_MAP 2001 #include <unordered_map> 2004 #if VMA_USE_STL_LIST 2013 #include <algorithm> 2017 #if !defined(_WIN32) && !defined(__APPLE__) 2023 #define VMA_NULL nullptr 2026 #if defined(__APPLE__) || defined(__ANDROID__) 2028 void *aligned_alloc(
size_t alignment,
size_t size)
2031 if(alignment <
sizeof(
void*))
2033 alignment =
sizeof(
void*);
2037 if(posix_memalign(&pointer, alignment, size) == 0)
2046 #define VMA_ASSERT(expr) assert(expr) 2048 #define VMA_ASSERT(expr) 2054 #ifndef VMA_HEAVY_ASSERT 2056 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2058 #define VMA_HEAVY_ASSERT(expr) 2062 #ifndef VMA_ALIGN_OF 2063 #define VMA_ALIGN_OF(type) (__alignof(type)) 2066 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2068 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2070 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2074 #ifndef VMA_SYSTEM_FREE 2076 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2078 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2083 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2087 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2091 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2095 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2098 #ifndef VMA_DEBUG_LOG 2099 #define VMA_DEBUG_LOG(format, ...) 2109 #if VMA_STATS_STRING_ENABLED 2110 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2112 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2114 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2116 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2118 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
2120 snprintf(outStr, strLen,
"%p", ptr);
2130 void Lock() { m_Mutex.lock(); }
2131 void Unlock() { m_Mutex.unlock(); }
2135 #define VMA_MUTEX VmaMutex 2146 #ifndef VMA_ATOMIC_UINT32 2147 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2150 #ifndef VMA_BEST_FIT 2163 #define VMA_BEST_FIT (1) 2166 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2171 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2174 #ifndef VMA_DEBUG_ALIGNMENT 2179 #define VMA_DEBUG_ALIGNMENT (1) 2182 #ifndef VMA_DEBUG_MARGIN 2187 #define VMA_DEBUG_MARGIN (0) 2190 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2195 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2198 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2203 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2206 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2207 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2211 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2212 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2216 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2222 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2223 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in the 32-bit value v (population count).
// Classic branch-free SWAR reduction: pairwise sums, then 4-, 8- and 16-bit folds.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c; // restored: the visible code computed c but never returned it
}
// Rounds val up to the nearest multiple of align. align must be nonzero;
// works for any positive align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bucketCount = (val + align - 1) / align;
    return bucketCount * align;
}
// Division of x by y with the result rounded to the nearest integer
// (ties round up, since half the divisor is added before dividing).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Lomuto-style partition used by VmaQuickSort. The last element (end - 1)
// is the pivot; every element for which cmp(elem, pivot) holds is moved in
// front of it. Returns an iterator to the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            // restored: without this advance every "smaller" element is
            // swapped into the same slot and the partition is wrong
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex; // restored missing return (pivot position)
}
2276 template<
typename Iterator,
typename Compare>
2277 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2281 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2282 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2283 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2287 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2289 #endif // #ifndef VMA_SORT 2298 static inline bool VmaBlocksOnSamePage(
2299 VkDeviceSize resourceAOffset,
2300 VkDeviceSize resourceASize,
2301 VkDeviceSize resourceBOffset,
2302 VkDeviceSize pageSize)
2304 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2305 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2306 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2307 VkDeviceSize resourceBStart = resourceBOffset;
2308 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2309 return resourceAEndPage == resourceBStartPage;
// Category of content stored in a suballocation. The numeric ordering is
// significant: VmaIsBufferImageGranularityConflict() normalizes its two
// arguments by this ordering before dispatching.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unused range.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Content unknown.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2329 static inline bool VmaIsBufferImageGranularityConflict(
2330 VmaSuballocationType suballocType1,
2331 VmaSuballocationType suballocType2)
2333 if(suballocType1 > suballocType2)
2335 VMA_SWAP(suballocType1, suballocType2);
2338 switch(suballocType1)
2340 case VMA_SUBALLOCATION_TYPE_FREE:
2342 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2344 case VMA_SUBALLOCATION_TYPE_BUFFER:
2346 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2347 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2348 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2350 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2351 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2352 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2353 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2355 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2356 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2368 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2369 m_pMutex(useMutex ? &mutex : VMA_NULL)
2386 VMA_MUTEX* m_pMutex;
2389 #if VMA_DEBUG_GLOBAL_MUTEX 2390 static VMA_MUTEX gDebugGlobalMutex;
2391 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2393 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2397 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element that is NOT less than key, or end if no such element exists.
// Equivalent to std::lower_bound with cmp as the "less" predicate.
// The loop condition, branch bodies and return were dropped from the visible
// text and are restored here.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // element < key: answer lies strictly after mid
        }
        else
        {
            up = mid;       // element >= key: mid is still a candidate
        }
    }
    return beg + down;
}
2430 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2432 if((pAllocationCallbacks != VMA_NULL) &&
2433 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2435 return (*pAllocationCallbacks->pfnAllocation)(
2436 pAllocationCallbacks->pUserData,
2439 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2443 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2447 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2449 if((pAllocationCallbacks != VMA_NULL) &&
2450 (pAllocationCallbacks->pfnFree != VMA_NULL))
2452 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2456 VMA_SYSTEM_FREE(ptr);
2460 template<
typename T>
2461 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2463 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2466 template<
typename T>
2467 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2469 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2472 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2474 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2476 template<
typename T>
2477 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2480 VmaFree(pAllocationCallbacks, ptr);
2483 template<
typename T>
2484 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2488 for(
size_t i = count; i--; )
2492 VmaFree(pAllocationCallbacks, ptr);
2497 template<
typename T>
2498 class VmaStlAllocator
2501 const VkAllocationCallbacks*
const m_pCallbacks;
2502 typedef T value_type;
2504 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2505 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2507 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2508 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2510 template<
typename U>
2511 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2513 return m_pCallbacks == rhs.m_pCallbacks;
2515 template<
typename U>
2516 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2518 return m_pCallbacks != rhs.m_pCallbacks;
2521 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2524 #if VMA_USE_STL_VECTOR 2526 #define VmaVector std::vector 2528 template<
typename T,
typename allocatorT>
2529 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2531 vec.insert(vec.begin() + index, item);
2534 template<
typename T,
typename allocatorT>
2535 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2537 vec.erase(vec.begin() + index);
2540 #else // #if VMA_USE_STL_VECTOR 2545 template<
typename T,
typename AllocatorT>
2549 typedef T value_type;
2551 VmaVector(
const AllocatorT& allocator) :
2552 m_Allocator(allocator),
2559 VmaVector(
size_t count,
const AllocatorT& allocator) :
2560 m_Allocator(allocator),
2561 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2567 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2568 m_Allocator(src.m_Allocator),
2569 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2570 m_Count(src.m_Count),
2571 m_Capacity(src.m_Count)
2575 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2581 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2584 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2588 resize(rhs.m_Count);
2591 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2597 bool empty()
const {
return m_Count == 0; }
2598 size_t size()
const {
return m_Count; }
2599 T* data() {
return m_pArray; }
2600 const T* data()
const {
return m_pArray; }
2602 T& operator[](
size_t index)
2604 VMA_HEAVY_ASSERT(index < m_Count);
2605 return m_pArray[index];
2607 const T& operator[](
size_t index)
const 2609 VMA_HEAVY_ASSERT(index < m_Count);
2610 return m_pArray[index];
2615 VMA_HEAVY_ASSERT(m_Count > 0);
2618 const T& front()
const 2620 VMA_HEAVY_ASSERT(m_Count > 0);
2625 VMA_HEAVY_ASSERT(m_Count > 0);
2626 return m_pArray[m_Count - 1];
2628 const T& back()
const 2630 VMA_HEAVY_ASSERT(m_Count > 0);
2631 return m_pArray[m_Count - 1];
2634 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2636 newCapacity = VMA_MAX(newCapacity, m_Count);
2638 if((newCapacity < m_Capacity) && !freeMemory)
2640 newCapacity = m_Capacity;
2643 if(newCapacity != m_Capacity)
2645 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2648 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2650 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2651 m_Capacity = newCapacity;
2652 m_pArray = newArray;
2656 void resize(
size_t newCount,
bool freeMemory =
false)
2658 size_t newCapacity = m_Capacity;
2659 if(newCount > m_Capacity)
2661 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2665 newCapacity = newCount;
2668 if(newCapacity != m_Capacity)
2670 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2671 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2672 if(elementsToCopy != 0)
2674 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2676 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2677 m_Capacity = newCapacity;
2678 m_pArray = newArray;
2684 void clear(
bool freeMemory =
false)
2686 resize(0, freeMemory);
2689 void insert(
size_t index,
const T& src)
2691 VMA_HEAVY_ASSERT(index <= m_Count);
2692 const size_t oldCount = size();
2693 resize(oldCount + 1);
2694 if(index < oldCount)
2696 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2698 m_pArray[index] = src;
2701 void remove(
size_t index)
2703 VMA_HEAVY_ASSERT(index < m_Count);
2704 const size_t oldCount = size();
2705 if(index < oldCount - 1)
2707 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2709 resize(oldCount - 1);
2712 void push_back(
const T& src)
2714 const size_t newIndex = size();
2715 resize(newIndex + 1);
2716 m_pArray[newIndex] = src;
2721 VMA_HEAVY_ASSERT(m_Count > 0);
2725 void push_front(
const T& src)
2732 VMA_HEAVY_ASSERT(m_Count > 0);
2736 typedef T* iterator;
2738 iterator begin() {
return m_pArray; }
2739 iterator end() {
return m_pArray + m_Count; }
2742 AllocatorT m_Allocator;
2748 template<
typename T,
typename allocatorT>
2749 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2751 vec.insert(index, item);
2754 template<
typename T,
typename allocatorT>
2755 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2760 #endif // #if VMA_USE_STL_VECTOR 2762 template<
typename CmpLess,
typename VectorT>
2763 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2765 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2767 vector.data() + vector.size(),
2769 CmpLess()) - vector.data();
2770 VmaVectorInsert(vector, indexToInsert, value);
2771 return indexToInsert;
2774 template<
typename CmpLess,
typename VectorT>
2775 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2778 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2783 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2785 size_t indexToRemove = it - vector.begin();
2786 VmaVectorRemove(vector, indexToRemove);
2792 template<
typename CmpLess,
typename VectorT>
2793 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2796 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2798 vector.data() + vector.size(),
2801 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2803 return it - vector.begin();
2807 return vector.size();
2819 template<
typename T>
2820 class VmaPoolAllocator
2823 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2824 ~VmaPoolAllocator();
2832 uint32_t NextFreeIndex;
2839 uint32_t FirstFreeIndex;
2842 const VkAllocationCallbacks* m_pAllocationCallbacks;
2843 size_t m_ItemsPerBlock;
2844 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2846 ItemBlock& CreateNewBlock();
2849 template<
typename T>
2850 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2851 m_pAllocationCallbacks(pAllocationCallbacks),
2852 m_ItemsPerBlock(itemsPerBlock),
2853 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2855 VMA_ASSERT(itemsPerBlock > 0);
2858 template<
typename T>
2859 VmaPoolAllocator<T>::~VmaPoolAllocator()
2864 template<
typename T>
2865 void VmaPoolAllocator<T>::Clear()
2867 for(
size_t i = m_ItemBlocks.size(); i--; )
2868 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2869 m_ItemBlocks.clear();
2872 template<
typename T>
2873 T* VmaPoolAllocator<T>::Alloc()
2875 for(
size_t i = m_ItemBlocks.size(); i--; )
2877 ItemBlock& block = m_ItemBlocks[i];
2879 if(block.FirstFreeIndex != UINT32_MAX)
2881 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2882 block.FirstFreeIndex = pItem->NextFreeIndex;
2883 return &pItem->Value;
2888 ItemBlock& newBlock = CreateNewBlock();
2889 Item*
const pItem = &newBlock.pItems[0];
2890 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2891 return &pItem->Value;
2894 template<
typename T>
2895 void VmaPoolAllocator<T>::Free(T* ptr)
2898 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2900 ItemBlock& block = m_ItemBlocks[i];
2904 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2907 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2909 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2910 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2911 block.FirstFreeIndex = index;
2915 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2918 template<
typename T>
2919 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2921 ItemBlock newBlock = {
2922 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2924 m_ItemBlocks.push_back(newBlock);
2927 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2928 newBlock.pItems[i].NextFreeIndex = i + 1;
2929 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2930 return m_ItemBlocks.back();
2936 #if VMA_USE_STL_LIST 2938 #define VmaList std::list 2940 #else // #if VMA_USE_STL_LIST 2942 template<
typename T>
2951 template<
typename T>
2955 typedef VmaListItem<T> ItemType;
2957 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2961 size_t GetCount()
const {
return m_Count; }
2962 bool IsEmpty()
const {
return m_Count == 0; }
2964 ItemType* Front() {
return m_pFront; }
2965 const ItemType* Front()
const {
return m_pFront; }
2966 ItemType* Back() {
return m_pBack; }
2967 const ItemType* Back()
const {
return m_pBack; }
2969 ItemType* PushBack();
2970 ItemType* PushFront();
2971 ItemType* PushBack(
const T& value);
2972 ItemType* PushFront(
const T& value);
2977 ItemType* InsertBefore(ItemType* pItem);
2979 ItemType* InsertAfter(ItemType* pItem);
2981 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2982 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2984 void Remove(ItemType* pItem);
2987 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2988 VmaPoolAllocator<ItemType> m_ItemAllocator;
2994 VmaRawList(
const VmaRawList<T>& src);
2995 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2998 template<
typename T>
2999 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3000 m_pAllocationCallbacks(pAllocationCallbacks),
3001 m_ItemAllocator(pAllocationCallbacks, 128),
3008 template<
typename T>
3009 VmaRawList<T>::~VmaRawList()
3015 template<
typename T>
3016 void VmaRawList<T>::Clear()
3018 if(IsEmpty() ==
false)
3020 ItemType* pItem = m_pBack;
3021 while(pItem != VMA_NULL)
3023 ItemType*
const pPrevItem = pItem->pPrev;
3024 m_ItemAllocator.Free(pItem);
3027 m_pFront = VMA_NULL;
3033 template<
typename T>
3034 VmaListItem<T>* VmaRawList<T>::PushBack()
3036 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3037 pNewItem->pNext = VMA_NULL;
3040 pNewItem->pPrev = VMA_NULL;
3041 m_pFront = pNewItem;
3047 pNewItem->pPrev = m_pBack;
3048 m_pBack->pNext = pNewItem;
3055 template<
typename T>
3056 VmaListItem<T>* VmaRawList<T>::PushFront()
3058 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3059 pNewItem->pPrev = VMA_NULL;
3062 pNewItem->pNext = VMA_NULL;
3063 m_pFront = pNewItem;
3069 pNewItem->pNext = m_pFront;
3070 m_pFront->pPrev = pNewItem;
3071 m_pFront = pNewItem;
3077 template<
typename T>
3078 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3080 ItemType*
const pNewItem = PushBack();
3081 pNewItem->Value = value;
3085 template<
typename T>
3086 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3088 ItemType*
const pNewItem = PushFront();
3089 pNewItem->Value = value;
3093 template<
typename T>
3094 void VmaRawList<T>::PopBack()
3096 VMA_HEAVY_ASSERT(m_Count > 0);
3097 ItemType*
const pBackItem = m_pBack;
3098 ItemType*
const pPrevItem = pBackItem->pPrev;
3099 if(pPrevItem != VMA_NULL)
3101 pPrevItem->pNext = VMA_NULL;
3103 m_pBack = pPrevItem;
3104 m_ItemAllocator.Free(pBackItem);
3108 template<
typename T>
3109 void VmaRawList<T>::PopFront()
3111 VMA_HEAVY_ASSERT(m_Count > 0);
3112 ItemType*
const pFrontItem = m_pFront;
3113 ItemType*
const pNextItem = pFrontItem->pNext;
3114 if(pNextItem != VMA_NULL)
3116 pNextItem->pPrev = VMA_NULL;
3118 m_pFront = pNextItem;
3119 m_ItemAllocator.Free(pFrontItem);
3123 template<
typename T>
3124 void VmaRawList<T>::Remove(ItemType* pItem)
3126 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3127 VMA_HEAVY_ASSERT(m_Count > 0);
3129 if(pItem->pPrev != VMA_NULL)
3131 pItem->pPrev->pNext = pItem->pNext;
3135 VMA_HEAVY_ASSERT(m_pFront == pItem);
3136 m_pFront = pItem->pNext;
3139 if(pItem->pNext != VMA_NULL)
3141 pItem->pNext->pPrev = pItem->pPrev;
3145 VMA_HEAVY_ASSERT(m_pBack == pItem);
3146 m_pBack = pItem->pPrev;
3149 m_ItemAllocator.Free(pItem);
3153 template<
typename T>
3154 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3156 if(pItem != VMA_NULL)
3158 ItemType*
const prevItem = pItem->pPrev;
3159 ItemType*
const newItem = m_ItemAllocator.Alloc();
3160 newItem->pPrev = prevItem;
3161 newItem->pNext = pItem;
3162 pItem->pPrev = newItem;
3163 if(prevItem != VMA_NULL)
3165 prevItem->pNext = newItem;
3169 VMA_HEAVY_ASSERT(m_pFront == pItem);
3179 template<
typename T>
3180 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3182 if(pItem != VMA_NULL)
3184 ItemType*
const nextItem = pItem->pNext;
3185 ItemType*
const newItem = m_ItemAllocator.Alloc();
3186 newItem->pNext = nextItem;
3187 newItem->pPrev = pItem;
3188 pItem->pNext = newItem;
3189 if(nextItem != VMA_NULL)
3191 nextItem->pPrev = newItem;
3195 VMA_HEAVY_ASSERT(m_pBack == pItem);
3205 template<
typename T>
3206 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3208 ItemType*
const newItem = InsertBefore(pItem);
3209 newItem->Value = value;
3213 template<
typename T>
3214 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3216 ItemType*
const newItem = InsertAfter(pItem);
3217 newItem->Value = value;
3221 template<
typename T,
typename AllocatorT>
3234 T& operator*()
const 3236 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3237 return m_pItem->Value;
3239 T* operator->()
const 3241 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3242 return &m_pItem->Value;
3245 iterator& operator++()
3247 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3248 m_pItem = m_pItem->pNext;
3251 iterator& operator--()
3253 if(m_pItem != VMA_NULL)
3255 m_pItem = m_pItem->pPrev;
3259 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3260 m_pItem = m_pList->Back();
3265 iterator operator++(
int)
3267 iterator result = *
this;
3271 iterator operator--(
int)
3273 iterator result = *
this;
3278 bool operator==(
const iterator& rhs)
const 3280 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3281 return m_pItem == rhs.m_pItem;
3283 bool operator!=(
const iterator& rhs)
const 3285 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3286 return m_pItem != rhs.m_pItem;
3290 VmaRawList<T>* m_pList;
3291 VmaListItem<T>* m_pItem;
3293 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3299 friend class VmaList<T, AllocatorT>;
3302 class const_iterator
3311 const_iterator(
const iterator& src) :
3312 m_pList(src.m_pList),
3313 m_pItem(src.m_pItem)
3317 const T& operator*()
const 3319 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3320 return m_pItem->Value;
3322 const T* operator->()
const 3324 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3325 return &m_pItem->Value;
3328 const_iterator& operator++()
3330 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3331 m_pItem = m_pItem->pNext;
3334 const_iterator& operator--()
3336 if(m_pItem != VMA_NULL)
3338 m_pItem = m_pItem->pPrev;
3342 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3343 m_pItem = m_pList->Back();
3348 const_iterator operator++(
int)
3350 const_iterator result = *
this;
3354 const_iterator operator--(
int)
3356 const_iterator result = *
this;
3361 bool operator==(
const const_iterator& rhs)
const 3363 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3364 return m_pItem == rhs.m_pItem;
3366 bool operator!=(
const const_iterator& rhs)
const 3368 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3369 return m_pItem != rhs.m_pItem;
3373 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3379 const VmaRawList<T>* m_pList;
3380 const VmaListItem<T>* m_pItem;
3382 friend class VmaList<T, AllocatorT>;
3385 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3387 bool empty()
const {
return m_RawList.IsEmpty(); }
3388 size_t size()
const {
return m_RawList.GetCount(); }
3390 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3391 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3393 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3394 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3396 void clear() { m_RawList.Clear(); }
3397 void push_back(
const T& value) { m_RawList.PushBack(value); }
3398 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3399 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3402 VmaRawList<T> m_RawList;
3405 #endif // #if VMA_USE_STL_LIST 3413 #if VMA_USE_STL_UNORDERED_MAP 3415 #define VmaPair std::pair 3417 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3418 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3420 #else // #if VMA_USE_STL_UNORDERED_MAP 3422 template<
typename T1,
typename T2>
3428 VmaPair() : first(), second() { }
3429 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3435 template<
typename KeyT,
typename ValueT>
3439 typedef VmaPair<KeyT, ValueT> PairType;
3440 typedef PairType* iterator;
3442 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3444 iterator begin() {
return m_Vector.begin(); }
3445 iterator end() {
return m_Vector.end(); }
3447 void insert(
const PairType& pair);
3448 iterator find(
const KeyT& key);
3449 void erase(iterator it);
3452 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3455 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3457 template<
typename FirstT,
typename SecondT>
3458 struct VmaPairFirstLess
3460 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3462 return lhs.first < rhs.first;
3464 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3466 return lhs.first < rhsFirst;
3470 template<
typename KeyT,
typename ValueT>
3471 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3473 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3475 m_Vector.data() + m_Vector.size(),
3477 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3478 VmaVectorInsert(m_Vector, indexToInsert, pair);
3481 template<
typename KeyT,
typename ValueT>
3482 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3484 PairType* it = VmaBinaryFindFirstNotLess(
3486 m_Vector.data() + m_Vector.size(),
3488 VmaPairFirstLess<KeyT, ValueT>());
3489 if((it != m_Vector.end()) && (it->first == key))
3495 return m_Vector.end();
3499 template<
typename KeyT,
typename ValueT>
3500 void VmaMap<KeyT, ValueT>::erase(iterator it)
3502 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3505 #endif // #if VMA_USE_STL_UNORDERED_MAP 3511 class VmaDeviceMemoryBlock;
// VmaAllocation_T: internal representation behind the VmaAllocation handle.
// An allocation is either a suballocation inside a VmaDeviceMemoryBlock
// (ALLOCATION_TYPE_BLOCK) or its own VkDeviceMemory (ALLOCATION_TYPE_DEDICATED);
// m_BlockAllocation / m_DedicatedAllocation hold the per-type state.
// m_MapCount packs a map reference count with a high-bit "persistently mapped"
// flag (MAP_COUNT_FLAG_PERSISTENT_MAP). m_LastUseFrameIndex is atomic and
// drives the "lost allocation" mechanism via compare-exchange.
3513 struct VmaAllocation_T
// High bit of m_MapCount: allocation was created persistently mapped.
3516 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
// m_pUserData points to an owned, heap-copied string (see SetUserData).
3520 FLAG_USER_DATA_STRING = 0x01,
3524 enum ALLOCATION_TYPE
3526 ALLOCATION_TYPE_NONE,
3527 ALLOCATION_TYPE_BLOCK,
3528 ALLOCATION_TYPE_DEDICATED,
// Constructor: starts as TYPE_NONE; a subsequent Init*Allocation call sets
// the real type exactly once (asserted in each Init).
3531 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3534 m_pUserData(VMA_NULL),
3535 m_LastUseFrameIndex(currentFrameIndex),
3536 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3537 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3539 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
// Destructor-time invariants: no outstanding non-persistent maps, and user
// data already released (FreeUserDataString must have run for string data).
3545 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3548 VMA_ASSERT(m_pUserData == VMA_NULL);
// Initialize as a suballocation within an existing device memory block.
3551 void InitBlockAllocation(
3553 VmaDeviceMemoryBlock* block,
3554 VkDeviceSize offset,
3555 VkDeviceSize alignment,
3557 VmaSuballocationType suballocationType,
3561 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3562 VMA_ASSERT(block != VMA_NULL);
3563 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3564 m_Alignment = alignment;
3566 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3567 m_SuballocationType = (uint8_t)suballocationType;
3568 m_BlockAllocation.m_hPool = hPool;
3569 m_BlockAllocation.m_Block = block;
3570 m_BlockAllocation.m_Offset = offset;
3571 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initialize as a "lost" placeholder allocation: no block, zero offset,
// frame index must already be VMA_FRAME_INDEX_LOST (asserted below).
3576 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3577 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3578 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3579 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3580 m_BlockAllocation.m_Block = VMA_NULL;
3581 m_BlockAllocation.m_Offset = 0;
3582 m_BlockAllocation.m_CanBecomeLost =
true;
// Re-point this allocation at another block/offset (used by defragmentation;
// definition below handles moving a persistent mapping across blocks).
3585 void ChangeBlockAllocation(
3586 VmaAllocator hAllocator,
3587 VmaDeviceMemoryBlock* block,
3588 VkDeviceSize offset);
// Initialize as a dedicated VkDeviceMemory allocation; pMappedData != NULL
// marks it persistently mapped.
3591 void InitDedicatedAllocation(
3592 uint32_t memoryTypeIndex,
3593 VkDeviceMemory hMemory,
3594 VmaSuballocationType suballocationType,
3598 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3599 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3600 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3603 m_SuballocationType = (uint8_t)suballocationType;
3604 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3605 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3606 m_DedicatedAllocation.m_hMemory = hMemory;
3607 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- Simple accessors ---
3610 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3611 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3612 VkDeviceSize GetSize()
const {
return m_Size; }
3613 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3614 void* GetUserData()
const {
return m_pUserData; }
3615 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3616 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// GetBlock is only valid for block allocations (asserted).
3618 VmaDeviceMemoryBlock* GetBlock()
const 3620 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3621 return m_BlockAllocation.m_Block;
3623 VkDeviceSize GetOffset()
const;
3624 VkDeviceMemory GetMemory()
const;
3625 uint32_t GetMemoryTypeIndex()
const;
3626 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3627 void* GetMappedData()
const;
3628 bool CanBecomeLost()
const;
3629 VmaPool GetPool()
const;
// --- Lost-allocation support: lock-free via atomic frame index ---
3631 uint32_t GetLastUseFrameIndex()
const 3633 return m_LastUseFrameIndex.load();
// compare_exchange_weak may fail spuriously; callers are expected to retry
// (typical CAS-loop usage, e.g. in MakeLost).
3635 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3637 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3647 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3649 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3651 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap entry points, split per allocation type.
3662 void BlockAllocMap();
3663 void BlockAllocUnmap();
3664 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3665 void DedicatedAllocUnmap(VmaAllocator hAllocator);
// --- Data members ---
3668 VkDeviceSize m_Alignment;
3669 VkDeviceSize m_Size;
3671 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3673 uint8_t m_SuballocationType;
// State for ALLOCATION_TYPE_BLOCK.
3680 struct BlockAllocation
3683 VmaDeviceMemoryBlock* m_Block;
3684 VkDeviceSize m_Offset;
3685 bool m_CanBecomeLost;
// State for ALLOCATION_TYPE_DEDICATED.
3689 struct DedicatedAllocation
3691 uint32_t m_MemoryTypeIndex;
3692 VkDeviceMemory m_hMemory;
3693 void* m_pMappedData;
// NOTE(review): presumably these two live in an anonymous union selected by
// m_Type — the union keyword is not visible in this extract; confirm.
3699 BlockAllocation m_BlockAllocation;
3701 DedicatedAllocation m_DedicatedAllocation;
3704 void FreeUserDataString(VmaAllocator hAllocator);
// VmaSuballocation: one region inside a memory block — either free
// (hAllocation == VK_NULL_HANDLE, type FREE) or occupied by an allocation.
3711 struct VmaSuballocation
3713 VkDeviceSize offset;
3715 VmaAllocation hAllocation;
3716 VmaSuballocationType type;
// Doubly-linked list of suballocations ordered by offset.
3719 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost penalty (in bytes) charged per allocation that would have to be made
// lost to satisfy a request — used to compare candidate placements.
3722 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// VmaAllocationRequest: candidate placement produced by
// CreateAllocationRequest; CalcCost ranks candidates (lower is better).
3737 struct VmaAllocationRequest
3739 VkDeviceSize offset;
3740 VkDeviceSize sumFreeSize;
3741 VkDeviceSize sumItemSize;
3742 VmaSuballocationList::iterator item;
3743 size_t itemsToMakeLostCount;
3745 VkDeviceSize CalcCost()
const 3747 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: bookkeeping for a single VkDeviceMemory block.
// Maintains the ordered suballocation list plus m_FreeSuballocationsBySize,
// a vector of iterators to free suballocations sorted by size (best-fit
// search). Invariants are checked by Validate().
3755 class VmaBlockMetadata
3758 VmaBlockMetadata(VmaAllocator hAllocator);
3759 ~VmaBlockMetadata();
3760 void Init(VkDeviceSize size);
// Validates all internal invariants; intended for use in VMA_ASSERT.
3763 bool Validate()
const;
3764 VkDeviceSize GetSize()
const {
return m_Size; }
// Allocation count = total suballocations minus the free ones.
3765 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3766 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3767 VkDeviceSize GetUnusedRangeSizeMax()
const;
3769 bool IsEmpty()
const;
3771 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3774 #if VMA_STATS_STRING_ENABLED 3775 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3779 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Try to find a place for an allocation of given size/alignment/type.
// Returns true on success and fills *pAllocationRequest; may propose making
// other allocations lost when canMakeOtherLost is set.
3784 bool CreateAllocationRequest(
3785 uint32_t currentFrameIndex,
3786 uint32_t frameInUseCount,
3787 VkDeviceSize bufferImageGranularity,
3788 VkDeviceSize allocSize,
3789 VkDeviceSize allocAlignment,
3790 VmaSuballocationType allocType,
3791 bool canMakeOtherLost,
3792 VmaAllocationRequest* pAllocationRequest);
3794 bool MakeRequestedAllocationsLost(
3795 uint32_t currentFrameIndex,
3796 uint32_t frameInUseCount,
3797 VmaAllocationRequest* pAllocationRequest);
3799 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Commit a previously computed request, carving the free range.
3803 const VmaAllocationRequest& request,
3804 VmaSuballocationType type,
3805 VkDeviceSize allocSize,
3806 VmaAllocation hAllocation);
3809 void Free(
const VmaAllocation allocation);
3810 void FreeAtOffset(VkDeviceSize offset);
3813 VkDeviceSize m_Size;
3814 uint32_t m_FreeCount;
3815 VkDeviceSize m_SumFreeSize;
3816 VmaSuballocationList m_Suballocations;
// Free suballocations sorted ascending by size; only ranges above a minimum
// registration threshold are tracked here (see Validate below).
3819 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3821 bool ValidateFreeSuballocationList()
const;
// Core fit check for one candidate free range; outputs chosen offset and,
// when canMakeOtherLost, how many allocations must be sacrificed.
3825 bool CheckAllocation(
3826 uint32_t currentFrameIndex,
3827 uint32_t frameInUseCount,
3828 VkDeviceSize bufferImageGranularity,
3829 VkDeviceSize allocSize,
3830 VkDeviceSize allocAlignment,
3831 VmaSuballocationType allocType,
3832 VmaSuballocationList::const_iterator suballocItem,
3833 bool canMakeOtherLost,
3834 VkDeviceSize* pOffset,
3835 size_t* itemsToMakeLostCount,
3836 VkDeviceSize* pSumFreeSize,
3837 VkDeviceSize* pSumItemSize)
const;
// Coalesce a free item with the free item that follows it.
3839 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3843 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3846 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3849 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaDeviceMemoryMapping: reference-counted CPU mapping of one VkDeviceMemory.
// Map/Unmap take a count so a caller can add/remove several references at
// once (used when moving persistently-mapped allocations between blocks).
3853 class VmaDeviceMemoryMapping
3856 VmaDeviceMemoryMapping();
3857 ~VmaDeviceMemoryMapping();
3859 void* GetMappedData()
const {
return m_pMappedData; }
3862 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3863 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
// m_MapCount: outstanding references; memory stays mapped while > 0.
3867 uint32_t m_MapCount;
3868 void* m_pMappedData;
// VmaDeviceMemoryBlock: one VkDeviceMemory plus its mapping state and
// suballocation metadata. Init/Destroy manage the Vulkan handle explicitly;
// the destructor only asserts the handle was already released.
3877 class VmaDeviceMemoryBlock
3880 uint32_t m_MemoryTypeIndex;
3881 VkDeviceMemory m_hMemory;
3882 VmaDeviceMemoryMapping m_Mapping;
3883 VmaBlockMetadata m_Metadata;
3885 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3887 ~VmaDeviceMemoryBlock()
// Destroy(allocator) must have been called first.
3889 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3894 uint32_t newMemoryTypeIndex,
3895 VkDeviceMemory newMemory,
3896 VkDeviceSize newSize);
3898 void Destroy(VmaAllocator allocator);
3901 bool Validate()
const;
// Ref-counted map/unmap, delegated to m_Mapping.
3904 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3905 void Unmap(VmaAllocator hAllocator, uint32_t count);
// VmaPointerLess: ordering functor for raw pointers (used for sorted
// pointer vectors / binary search).
3908 struct VmaPointerLess
3910 bool operator()(
const void* lhs,
const void* rhs)
const 3916 class VmaDefragmentator;
// VmaBlockVector: the set of VmaDeviceMemoryBlocks for one memory type —
// either the allocator's default vector for that type or a custom pool's.
// Owns block creation within [minBlockCount, maxBlockCount] and the
// per-vector defragmentator.
3924 struct VmaBlockVector
3927 VmaAllocator hAllocator,
3928 uint32_t memoryTypeIndex,
3929 VkDeviceSize preferredBlockSize,
3930 size_t minBlockCount,
3931 size_t maxBlockCount,
3932 VkDeviceSize bufferImageGranularity,
3933 uint32_t frameInUseCount,
3937 VkResult CreateMinBlocks();
3939 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3940 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3941 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3942 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3946 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate from an existing block or create a new one.
3949 VmaPool hCurrentPool,
3950 uint32_t currentFrameIndex,
3951 const VkMemoryRequirements& vkMemReq,
3953 VmaSuballocationType suballocType,
3954 VmaAllocation* pAllocation);
3957 VmaAllocation hAllocation);
3962 #if VMA_STATS_STRING_ENABLED 3963 void PrintDetailedMap(
class VmaJsonWriter& json);
3966 void MakePoolAllocationsLost(
3967 uint32_t currentFrameIndex,
3968 size_t* pLostAllocationCount);
// Lazily creates m_pDefragmentator on first use.
3970 VmaDefragmentator* EnsureDefragmentator(
3971 VmaAllocator hAllocator,
3972 uint32_t currentFrameIndex);
3974 VkResult Defragment(
3976 VkDeviceSize& maxBytesToMove,
3977 uint32_t& maxAllocationsToMove);
3979 void DestroyDefragmentator();
3982 friend class VmaDefragmentator;
// Immutable configuration, fixed at construction.
3984 const VmaAllocator m_hAllocator;
3985 const uint32_t m_MemoryTypeIndex;
3986 const VkDeviceSize m_PreferredBlockSize;
3987 const size_t m_MinBlockCount;
3988 const size_t m_MaxBlockCount;
3989 const VkDeviceSize m_BufferImageGranularity;
3990 const uint32_t m_FrameInUseCount;
3991 const bool m_IsCustomPool;
// Blocks kept incrementally sorted (see IncrementallySortBlocks).
3994 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while exactly one empty block is retained to avoid alloc/free churn.
3998 bool m_HasEmptyBlock;
3999 VmaDefragmentator* m_pDefragmentator;
4001 size_t CalcMaxBlockSize()
const;
4004 void Remove(VmaDeviceMemoryBlock* pBlock);
4008 void IncrementallySortBlocks();
4010 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// --- VmaPool_T (custom pool): thin wrapper around one VmaBlockVector ---
4016 VmaBlockVector m_BlockVector;
4020 VmaAllocator hAllocator,
4024 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
4026 #if VMA_STATS_STRING_ENABLED 4031 class VmaDefragmentator
4026 #if VMA_STATS_STRING_ENABLED 4031 class VmaDefragmentator
4033 const VmaAllocator m_hAllocator;
4034 VmaBlockVector*
const m_pBlockVector;
4035 uint32_t m_CurrentFrameIndex;
4036 VkDeviceSize m_BytesMoved;
4037 uint32_t m_AllocationsMoved;
4039 struct AllocationInfo
4041 VmaAllocation m_hAllocation;
4042 VkBool32* m_pChanged;
4045 m_hAllocation(VK_NULL_HANDLE),
4046 m_pChanged(VMA_NULL)
4051 struct AllocationInfoSizeGreater
4053 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4055 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4060 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4064 VmaDeviceMemoryBlock* m_pBlock;
4065 bool m_HasNonMovableAllocations;
4066 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4068 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4070 m_HasNonMovableAllocations(true),
4071 m_Allocations(pAllocationCallbacks),
4072 m_pMappedDataForDefragmentation(VMA_NULL)
4076 void CalcHasNonMovableAllocations()
4078 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4079 const size_t defragmentAllocCount = m_Allocations.size();
4080 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4083 void SortAllocationsBySizeDescecnding()
4085 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4088 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
4089 void Unmap(VmaAllocator hAllocator);
4093 void* m_pMappedDataForDefragmentation;
4096 struct BlockPointerLess
4098 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4100 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4102 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4104 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4110 struct BlockInfoCompareMoveDestination
4112 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4114 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4118 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4122 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4130 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4131 BlockInfoVector m_Blocks;
4133 VkResult DefragmentRound(
4134 VkDeviceSize maxBytesToMove,
4135 uint32_t maxAllocationsToMove);
4137 static bool MoveMakesSense(
4138 size_t dstBlockIndex, VkDeviceSize dstOffset,
4139 size_t srcBlockIndex, VkDeviceSize srcOffset);
4143 VmaAllocator hAllocator,
4144 VmaBlockVector* pBlockVector,
4145 uint32_t currentFrameIndex);
4147 ~VmaDefragmentator();
4149 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4150 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4152 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
4154 VkResult Defragment(
4155 VkDeviceSize maxBytesToMove,
4156 uint32_t maxAllocationsToMove);
// VmaAllocator_T: internal state behind the VmaAllocator handle — device
// properties, per-memory-type block vectors, dedicated-allocation lists
// (each guarded by its own mutex), custom pools, and the top-level
// allocate/free/map entry points.
4160 struct VmaAllocator_T
4163 bool m_UseKhrDedicatedAllocation;
4165 bool m_AllocationCallbacksSpecified;
4166 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size limits; access guarded by m_HeapSizeLimitMutex.
4170 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4171 VMA_MUTEX m_HeapSizeLimitMutex;
4173 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4174 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default (non-pool) block vectors, one slot per memory type.
4177 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each with its own mutex.
4180 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4181 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4182 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only if the user actually supplied them.
4187 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4189 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4193 return m_VulkanFunctions;
// Effective granularity: at least the debug minimum, else the device limit.
4196 VkDeviceSize GetBufferImageGranularity()
const 4199 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4200 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4203 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4204 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4206 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4208 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4209 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Query requirements plus whether a dedicated allocation is required or
// merely preferred (VK_KHR_dedicated_allocation).
4212 void GetBufferMemoryRequirements(
4214 VkMemoryRequirements& memReq,
4215 bool& requiresDedicatedAllocation,
4216 bool& prefersDedicatedAllocation)
const;
4217 void GetImageMemoryRequirements(
4219 VkMemoryRequirements& memReq,
4220 bool& requiresDedicatedAllocation,
4221 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point.
4224 VkResult AllocateMemory(
4225 const VkMemoryRequirements& vkMemReq,
4226 bool requiresDedicatedAllocation,
4227 bool prefersDedicatedAllocation,
4228 VkBuffer dedicatedBuffer,
4229 VkImage dedicatedImage,
4231 VmaSuballocationType suballocType,
4232 VmaAllocation* pAllocation);
4235 void FreeMemory(
const VmaAllocation allocation);
4237 void CalculateStats(
VmaStats* pStats);
4239 #if VMA_STATS_STRING_ENABLED 4240 void PrintDetailedMap(
class VmaJsonWriter& json);
4243 VkResult Defragment(
4244 VmaAllocation* pAllocations,
4245 size_t allocationCount,
4246 VkBool32* pAllocationsChanged,
4250 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4251 bool TouchAllocation(VmaAllocation hAllocation);
4254 void DestroyPool(VmaPool pool);
4255 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4257 void SetCurrentFrameIndex(uint32_t frameIndex);
4259 void MakePoolAllocationsLost(
4261 size_t* pLostAllocationCount);
4263 void CreateLostAllocation(VmaAllocation* pAllocation);
// Raw vkAllocateMemory/vkFreeMemory wrappers (honor heap size limits).
4265 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4266 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4268 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4269 void Unmap(VmaAllocation hAllocation);
4272 VkDeviceSize m_PreferredLargeHeapBlockSize;
4274 VkPhysicalDevice m_PhysicalDevice;
4275 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
4277 VMA_MUTEX m_PoolsMutex;
4279 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4285 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation within one already-chosen memory type.
4287 VkResult AllocateMemoryOfType(
4288 const VkMemoryRequirements& vkMemReq,
4289 bool dedicatedAllocation,
4290 VkBuffer dedicatedBuffer,
4291 VkImage dedicatedImage,
4293 uint32_t memTypeIndex,
4294 VmaSuballocationType suballocType,
4295 VmaAllocation* pAllocation);
4298 VkResult AllocateDedicatedMemory(
4300 VmaSuballocationType suballocType,
4301 uint32_t memTypeIndex,
4303 bool isUserDataString,
4305 VkBuffer dedicatedBuffer,
4306 VkImage dedicatedImage,
4307 VmaAllocation* pAllocation);
4310 void FreeDedicatedMemory(VmaAllocation allocation);
// Allocator-aware memory helpers: forward to the CPU-allocation-callback
// variants using the VmaAllocator's stored VkAllocationCallbacks.
4316 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4318 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4321 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4323 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (malloc only; construction is done by the
// caller, presumably via placement new in elided lines — confirm upstream).
4326 template<
typename T>
4327 static T* VmaAllocate(VmaAllocator hAllocator)
4329 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4332 template<
typename T>
4333 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4335 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_delete / vma_delete_array: destroy then free. NOTE(review): the
// destructor-invoking lines appear elided in this extract; only the frees
// are visible.
4338 template<
typename T>
4339 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4344 VmaFree(hAllocator, ptr);
4348 template<
typename T>
4349 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4353 for(
size_t i = count; i--; )
4355 VmaFree(hAllocator, ptr);
// VmaStringBuilder: append-only character buffer used to build the JSON
// statistics string. Backed by a VmaVector<char>; data is NOT
// null-terminated here — GetLength/GetData expose the raw span.
4362 #if VMA_STATS_STRING_ENABLED 4364 class VmaStringBuilder
4367 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4368 size_t GetLength()
const {
return m_Data.size(); }
4369 const char* GetData()
const {
return m_Data.data(); }
4371 void Add(
char ch) { m_Data.push_back(ch); }
4372 void Add(
const char* pStr);
4373 void AddNewLine() { Add(
'\n'); }
4374 void AddNumber(uint32_t num);
4375 void AddNumber(uint64_t num);
4376 void AddPointer(
const void* ptr);
4379 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Append a C string by bulk resize + memcpy (no terminator copied).
4382 void VmaStringBuilder::Add(
const char* pStr)
4384 const size_t strLen = strlen(pStr);
4387 const size_t oldCount = m_Data.size();
4388 m_Data.resize(oldCount + strLen);
4389 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting into a stack buffer via Vma*ToStr helpers.
4393 void VmaStringBuilder::AddNumber(uint32_t num)
4396 VmaUint32ToStr(buf,
sizeof(buf), num);
4400 void VmaStringBuilder::AddNumber(uint64_t num)
4403 VmaUint64ToStr(buf,
sizeof(buf), num);
4407 void VmaStringBuilder::AddPointer(
const void* ptr)
4410 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: streaming JSON emitter over a VmaStringBuilder. Tracks a
// stack of open objects/arrays so commas, key/value alternation, and
// indentation can be validated (see BeginValue) and formatted.
4414 #endif // #if VMA_STATS_STRING_ENABLED 4419 #if VMA_STATS_STRING_ENABLED 4424 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// singleLine == true suppresses newlines/indentation inside the collection.
4427 void BeginObject(
bool singleLine =
false);
4430 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString; Continue* append inside an open
// string without re-quoting.
4433 void WriteString(
const char* pStr);
4434 void BeginString(
const char* pStr = VMA_NULL);
4435 void ContinueString(
const char* pStr);
4436 void ContinueString(uint32_t n);
4437 void ContinueString(uint64_t n);
4438 void ContinueString_Pointer(
const void* ptr);
4439 void EndString(
const char* pStr = VMA_NULL);
4441 void WriteNumber(uint32_t n);
4442 void WriteNumber(uint64_t n);
4443 void WriteBool(
bool b);
4447 static const char*
const INDENT;
4449 enum COLLECTION_TYPE
4451 COLLECTION_TYPE_OBJECT,
4452 COLLECTION_TYPE_ARRAY,
// StackItem: one currently-open collection; valueCount drives comma
// placement and, for objects, key/value alternation.
4456 COLLECTION_TYPE type;
4457 uint32_t valueCount;
4458 bool singleLineMode;
4461 VmaStringBuilder& m_SB;
4462 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4463 bool m_InsideString;
4465 void BeginValue(
bool isString);
4466 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
4469 const char*
const VmaJsonWriter::INDENT =
"  ";
// --- VmaJsonWriter method definitions ---
4471 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4473 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4474 m_InsideString(false)
// Destructor asserts the document was fully closed.
4478 VmaJsonWriter::~VmaJsonWriter()
4480 VMA_ASSERT(!m_InsideString);
4481 VMA_ASSERT(m_Stack.empty());
// BeginObject/BeginArray: push a StackItem; matching End* pops it and
// asserts the collection types pair up.
4484 void VmaJsonWriter::BeginObject(
bool singleLine)
4486 VMA_ASSERT(!m_InsideString);
4492 item.type = COLLECTION_TYPE_OBJECT;
4493 item.valueCount = 0;
4494 item.singleLineMode = singleLine;
4495 m_Stack.push_back(item);
4498 void VmaJsonWriter::EndObject()
4500 VMA_ASSERT(!m_InsideString);
4505 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4509 void VmaJsonWriter::BeginArray(
bool singleLine)
4511 VMA_ASSERT(!m_InsideString);
4517 item.type = COLLECTION_TYPE_ARRAY;
4518 item.valueCount = 0;
4519 item.singleLineMode = singleLine;
4520 m_Stack.push_back(item);
4523 void VmaJsonWriter::EndArray()
4525 VMA_ASSERT(!m_InsideString);
4530 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4534 void VmaJsonWriter::WriteString(
const char* pStr)
4540 void VmaJsonWriter::BeginString(
const char* pStr)
4542 VMA_ASSERT(!m_InsideString);
4546 m_InsideString =
true;
4547 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4549 ContinueString(pStr);
// ContinueString: per-character JSON escaping. NOTE(review): the escaping
// switch body (original lines ~4559-4590) is elided from this extract; only
// the unsupported-character assert is visible.
4553 void VmaJsonWriter::ContinueString(
const char* pStr)
4555 VMA_ASSERT(m_InsideString);
4557 const size_t strLen = strlen(pStr);
4558 for(
size_t i = 0; i < strLen; ++i)
4591 VMA_ASSERT(0 &&
"Character not currently supported.");
4597 void VmaJsonWriter::ContinueString(uint32_t n)
4599 VMA_ASSERT(m_InsideString);
4603 void VmaJsonWriter::ContinueString(uint64_t n)
4605 VMA_ASSERT(m_InsideString);
4609 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4611 VMA_ASSERT(m_InsideString);
4612 m_SB.AddPointer(ptr);
4615 void VmaJsonWriter::EndString(
const char* pStr)
4617 VMA_ASSERT(m_InsideString);
4618 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4620 ContinueString(pStr);
4623 m_InsideString =
false;
4626 void VmaJsonWriter::WriteNumber(uint32_t n)
4628 VMA_ASSERT(!m_InsideString);
4633 void VmaJsonWriter::WriteNumber(uint64_t n)
4635 VMA_ASSERT(!m_InsideString);
4640 void VmaJsonWriter::WriteBool(
bool b)
4642 VMA_ASSERT(!m_InsideString);
4644 m_SB.Add(b ?
"true" :
"false");
4647 void VmaJsonWriter::WriteNull()
4649 VMA_ASSERT(!m_InsideString);
// BeginValue: emit separators before a new value. In objects, even-indexed
// values must be keys (strings); odd-indexed values follow a ':', others
// follow a ','.
4654 void VmaJsonWriter::BeginValue(
bool isString)
4656 if(!m_Stack.empty())
4658 StackItem& currItem = m_Stack.back();
4659 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4660 currItem.valueCount % 2 == 0)
4662 VMA_ASSERT(isString);
4665 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4666 currItem.valueCount % 2 != 0)
4670 else if(currItem.valueCount > 0)
4679 ++currItem.valueCount;
// WriteIndent: newline + one INDENT per open non-single-line level;
// oneLess is used when closing a collection.
4683 void VmaJsonWriter::WriteIndent(
bool oneLess)
4685 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4689 size_t count = m_Stack.size();
4690 if(count > 0 && oneLess)
4694 for(
size_t i = 0; i < count; ++i)
// SetUserData: in string mode (FLAG_USER_DATA_STRING) the allocation OWNS a
// heap copy of the string — the old copy is freed and the new one duplicated
// with vma_new_array; otherwise the raw pointer is stored as-is.
4701 #endif // #if VMA_STATS_STRING_ENABLED 4705 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4707 if(IsUserDataString())
// Setting the same owned pointer again would read freed memory below.
4709 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData)
4711 FreeUserDataString(hAllocator);
4713 if(pUserData != VMA_NULL)
4715 const char*
const newStrSrc = (
char*)pUserData;
4716 const size_t newStrLen = strlen(newStrSrc);
4717 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the terminating NUL as well.
4718 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4719 m_pUserData = newStrDst;
4724 m_pUserData = pUserData;
// ChangeBlockAllocation (defragmentation): when moving a persistently
// mapped allocation to a different block, transfer the map reference count
// from the old block's mapping to the new block's before re-pointing.
4728 void VmaAllocation_T::ChangeBlockAllocation(
4729 VmaAllocator hAllocator,
4730 VmaDeviceMemoryBlock* block,
4731 VkDeviceSize offset)
4733 VMA_ASSERT(block != VMA_NULL);
4734 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4737 if(block != m_BlockAllocation.m_Block)
4739 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
// NOTE(review): mapRefCount is not incremented for the persistent-map
// reference here — verify against upstream whether the persistent
// reference is accounted for when transferring the mapping.
4740 if(IsPersistentMap())
4742 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4743 block->Map(hAllocator, mapRefCount, VMA_NULL);
4746 m_BlockAllocation.m_Block = block;
4747 m_BlockAllocation.m_Offset = offset;
// --- VmaAllocation_T accessors dispatching on m_Type ---
4750 VkDeviceSize VmaAllocation_T::GetOffset()
const 4754 case ALLOCATION_TYPE_BLOCK:
4755 return m_BlockAllocation.m_Offset;
4756 case ALLOCATION_TYPE_DEDICATED:
// (Dedicated-case body and default are elided in this extract.)
4764 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4768 case ALLOCATION_TYPE_BLOCK:
4769 return m_BlockAllocation.m_Block->m_hMemory;
4770 case ALLOCATION_TYPE_DEDICATED:
4771 return m_DedicatedAllocation.m_hMemory;
4774 return VK_NULL_HANDLE;
4778 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4782 case ALLOCATION_TYPE_BLOCK:
4783 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4784 case ALLOCATION_TYPE_DEDICATED:
4785 return m_DedicatedAllocation.m_MemoryTypeIndex;
// GetMappedData: for a block allocation the pointer is the block's mapped
// base plus this allocation's offset; for dedicated it is stored directly.
4792 void* VmaAllocation_T::GetMappedData()
const 4796 case ALLOCATION_TYPE_BLOCK:
4799 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4800 VMA_ASSERT(pBlockData != VMA_NULL);
4801 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4808 case ALLOCATION_TYPE_DEDICATED:
// Invariant: dedicated mapped pointer is non-null iff map count nonzero.
4809 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4810 return m_DedicatedAllocation.m_pMappedData;
4817 bool VmaAllocation_T::CanBecomeLost()
const 4821 case ALLOCATION_TYPE_BLOCK:
4822 return m_BlockAllocation.m_CanBecomeLost;
4823 case ALLOCATION_TYPE_DEDICATED:
4831 VmaPool VmaAllocation_T::GetPool()
const 4833 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4834 return m_BlockAllocation.m_hPool;
// MakeLost: CAS loop on the atomic last-use frame index. Fails if already
// lost or still in use within the frameInUseCount window; otherwise retries
// the compare-exchange until it lands VMA_FRAME_INDEX_LOST.
4837 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4839 VMA_ASSERT(CanBecomeLost());
4845 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4848 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4853 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4859 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// FreeUserDataString: release the owned string copy (see SetUserData).
4869 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4871 VMA_ASSERT(IsUserDataString());
4872 if(m_pUserData != VMA_NULL)
4874 char*
const oldStr = (
char*)m_pUserData;
4875 const size_t oldStrLen = strlen(oldStr);
// +1 releases the NUL terminator allocated in SetUserData.
4876 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4877 m_pUserData = VMA_NULL;
// BlockAllocMap/Unmap: bump/decrement the 7-bit map ref count; 0x7F is the
// maximum simultaneous map count (high bit reserved for persistent flag).
4881 void VmaAllocation_T::BlockAllocMap()
4883 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4885 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4891 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4895 void VmaAllocation_T::BlockAllocUnmap()
4897 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4899 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4905 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap: if already mapped, return the cached pointer;
// otherwise call vkMapMemory and cache the result on success.
4909 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4911 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4915 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4917 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4918 *ppData = m_DedicatedAllocation.m_pMappedData;
4924 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4925 return VK_ERROR_MEMORY_MAP_FAILED;
4930 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4931 hAllocator->m_hDevice,
4932 m_DedicatedAllocation.m_hMemory,
4937 if(result == VK_SUCCESS)
4939 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: on last reference, clear the cached pointer and call
// vkUnmapMemory.
4946 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4948 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4950 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4955 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4956 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4957 hAllocator->m_hDevice,
4958 m_DedicatedAllocation.m_hMemory);
4963 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4979 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4983 json.WriteString(
"Blocks");
4986 json.WriteString(
"Allocations");
4989 json.WriteString(
"UnusedRanges");
4992 json.WriteString(
"UsedBytes");
4995 json.WriteString(
"UnusedBytes");
5000 json.WriteString(
"AllocationSize");
5001 json.BeginObject(
true);
5002 json.WriteString(
"Min");
5004 json.WriteString(
"Avg");
5006 json.WriteString(
"Max");
5013 json.WriteString(
"UnusedRangeSize");
5014 json.BeginObject(
true);
5015 json.WriteString(
"Min");
5017 json.WriteString(
"Avg");
5019 json.WriteString(
"Max");
// VmaSuballocationItemSizeLess: orders suballocation-list iterators by the
// pointee's size; the second overload compares against a bare VkDeviceSize
// so binary searches need not build a dummy suballocation.
5027 #endif // #if VMA_STATS_STRING_ENABLED 5029 struct VmaSuballocationItemSizeLess
5032 const VmaSuballocationList::iterator lhs,
5033 const VmaSuballocationList::iterator rhs)
const 5035 return lhs->size < rhs->size;
5038 const VmaSuballocationList::iterator lhs,
5039 VkDeviceSize rhsSize)
const 5041 return lhs->size < rhsSize;
// VmaBlockMetadata ctor: empty containers using the allocator's callbacks.
5048 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
5052 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5053 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5057 VmaBlockMetadata::~VmaBlockMetadata()
// Init: the whole block starts as a single free suballocation covering
// [0, size), registered in the by-size vector.
5061 void VmaBlockMetadata::Init(VkDeviceSize size)
5065 m_SumFreeSize = size;
5067 VmaSuballocation suballoc = {};
5068 suballoc.offset = 0;
5069 suballoc.size = size;
5070 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5071 suballoc.hAllocation = VK_NULL_HANDLE;
5073 m_Suballocations.push_back(suballoc);
// NOTE(review): an end() iterator is taken and presumably decremented to the
// just-pushed element on an elided line before being registered — confirm.
5074 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5076 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: walks the suballocation list recomputing offsets, free count and
// free-size sum, checking structural invariants; returns false on any
// violation. (Early-return bodies are elided in this extract.)
5079 bool VmaBlockMetadata::Validate()
const 5081 if(m_Suballocations.empty())
// Expected next offset; each suballocation must start exactly here.
5087 VkDeviceSize calculatedOffset = 0;
5089 uint32_t calculatedFreeCount = 0;
5091 VkDeviceSize calculatedSumFreeSize = 0;
// How many free ranges are large enough to be in m_FreeSuballocationsBySize.
5094 size_t freeSuballocationsToRegister = 0;
// Two adjacent free ranges are invalid — they must have been merged.
5096 bool prevFree =
false;
5098 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5099 suballocItem != m_Suballocations.cend();
5102 const VmaSuballocation& subAlloc = *suballocItem;
5105 if(subAlloc.offset != calculatedOffset)
5110 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5112 if(prevFree && currFree)
// Free <=> null allocation handle.
5117 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5124 calculatedSumFreeSize += subAlloc.size;
5125 ++calculatedFreeCount;
5126 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5128 ++freeSuballocationsToRegister;
// Occupied entries must agree with their VmaAllocation's offset/size.
5133 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5137 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5143 calculatedOffset += subAlloc.size;
5144 prevFree = currFree;
5149 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size vector must contain only free items, sorted ascending.
5154 VkDeviceSize lastSize = 0;
5155 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5157 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5160 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5165 if(suballocItem->size < lastSize)
5170 lastSize = suballocItem->size;
// Final cross-checks of recomputed totals against the cached members.
5174 if(!ValidateFreeSuballocationList() ||
5175 (calculatedOffset != m_Size) ||
5176 (calculatedSumFreeSize != m_SumFreeSize) ||
5177 (calculatedFreeCount != m_FreeCount))
// Largest free range = last element of the size-sorted vector.
5185 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5187 if(!m_FreeSuballocationsBySize.empty())
5189 return m_FreeSuballocationsBySize.back()->size;
// Empty block = exactly one suballocation and it is free.
5197 bool VmaBlockMetadata::IsEmpty()
const 5199 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Computes per-block allocation statistics into outInfo by walking every
// suballocation and classifying it as used or free.
// NOTE(review): the outInfo field initialization and the per-suballocation
// accumulation bodies appear to be elided in this extraction — verify
// against the upstream VMA source.
5202 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5206 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5218 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5219 suballocItem != m_Suballocations.cend();
5222 const VmaSuballocation& suballoc = *suballocItem;
// Used suballocations and free ranges feed different stat counters.
5223 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's contribution into pool-level statistics.
// Visible here: the block's total size is added to inoutStats.size.
// NOTE(review): the remaining accumulation lines (allocation count, unused
// size/ranges) appear to be elided in this extraction — verify upstream.
5236 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5238 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5240 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON: summary totals first, then one
// object per suballocation (type, size, offset, optional user data).
// Only compiled when VMA_STATS_STRING_ENABLED.
// NOTE(review): several JSON structural calls (BeginObject/EndObject/
// BeginList and the loop's `size_t i` declaration) appear to be elided in
// this extraction — verify against the upstream VMA source.
5247 #if VMA_STATS_STRING_ENABLED 5249 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5253 json.WriteString(
"TotalBytes");
5254 json.WriteNumber(m_Size);
5256 json.WriteString(
"UnusedBytes");
5257 json.WriteNumber(m_SumFreeSize);
// Used allocations = total suballocation nodes minus free ranges.
5259 json.WriteString(
"Allocations");
5260 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5262 json.WriteString(
"UnusedRanges");
5263 json.WriteNumber(m_FreeCount);
5265 json.WriteString(
"Suballocations");
5268 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5269 suballocItem != m_Suballocations.cend();
5270 ++suballocItem, ++i)
5272 json.BeginObject(
true);
5274 json.WriteString(
"Type");
5275 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5277 json.WriteString(
"Size");
5278 json.WriteNumber(suballocItem->size);
5280 json.WriteString(
"Offset");
5281 json.WriteNumber(suballocItem->offset);
// Used suballocations may carry user data: either a string or a raw
// pointer, distinguished by IsUserDataString().
5283 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5285 const void* pUserData = suballocItem->hAllocation->GetUserData();
5286 if(pUserData != VMA_NULL)
5288 json.WriteString(
"UserData");
5289 if(suballocItem->hAllocation->IsUserDataString())
5291 json.WriteString((
const char*)pUserData);
5296 json.ContinueString_Pointer(pUserData);
5309 #endif // #if VMA_STATS_STRING_ENABLED 5321 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5323 VMA_ASSERT(IsEmpty());
5324 pAllocationRequest->offset = 0;
5325 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5326 pAllocationRequest->sumItemSize = 0;
5327 pAllocationRequest->item = m_Suballocations.begin();
5328 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for an allocation of allocSize/allocAlignment inside
// this block. Two strategies:
//  1. Best-fit search over m_FreeSuballocationsBySize (binary search for the
//    first free range big enough, then linear probing with CheckAllocation).
//  2. When canMakeOtherLost, a brute-force scan over all suballocations,
//    allowing existing lost-able allocations to be sacrificed; keeps the
//    candidate with the lowest CalcCost().
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): many argument lines and the success `return true;` /
// failure `return false;` bodies are elided in this extraction — verify
// against the upstream VMA source.
5331 bool VmaBlockMetadata::CreateAllocationRequest(
5332 uint32_t currentFrameIndex,
5333 uint32_t frameInUseCount,
5334 VkDeviceSize bufferImageGranularity,
5335 VkDeviceSize allocSize,
5336 VkDeviceSize allocAlignment,
5337 VmaSuballocationType allocType,
5338 bool canMakeOtherLost,
5339 VmaAllocationRequest* pAllocationRequest)
5341 VMA_ASSERT(allocSize > 0);
5342 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5343 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5344 VMA_HEAVY_ASSERT(Validate());
// Fast reject: if nothing may be made lost, total free space must suffice.
5347 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5353 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5354 if(freeSuballocCount > 0)
// Best-fit path: binary search for the first free range >= allocSize.
5359 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5360 m_FreeSuballocationsBySize.data(),
5361 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5363 VmaSuballocationItemSizeLess());
5364 size_t index = it - m_FreeSuballocationsBySize.data();
// Probe candidates of non-decreasing size until one satisfies alignment and
// granularity constraints.
5365 for(; index < freeSuballocCount; ++index)
5370 bufferImageGranularity,
5374 m_FreeSuballocationsBySize[index],
5376 &pAllocationRequest->offset,
5377 &pAllocationRequest->itemsToMakeLostCount,
5378 &pAllocationRequest->sumFreeSize,
5379 &pAllocationRequest->sumItemSize))
5381 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit fallback: scan from the largest free range downward.
5389 for(
size_t index = freeSuballocCount; index--; )
5394 bufferImageGranularity,
5398 m_FreeSuballocationsBySize[index],
5400 &pAllocationRequest->offset,
5401 &pAllocationRequest->itemsToMakeLostCount,
5402 &pAllocationRequest->sumFreeSize,
5403 &pAllocationRequest->sumItemSize))
5405 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lossy path: consider sacrificing lost-able allocations.
5412 if(canMakeOtherLost)
// Start with worst possible cost so any feasible candidate wins.
5416 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5417 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5419 VmaAllocationRequest tmpAllocRequest = {};
5420 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5421 suballocIt != m_Suballocations.end();
// A starting point is viable if it is free or its allocation can be lost.
5424 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5425 suballocIt->hAllocation->CanBecomeLost())
5430 bufferImageGranularity,
5436 &tmpAllocRequest.offset,
5437 &tmpAllocRequest.itemsToMakeLostCount,
5438 &tmpAllocRequest.sumFreeSize,
5439 &tmpAllocRequest.sumItemSize))
5441 tmpAllocRequest.item = suballocIt;
// Keep the cheapest candidate (least data sacrificed).
5443 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5445 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate improved on the VK_WHOLE_SIZE sentinel.
5451 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Carries out the "make lost" part of an allocation request: walks forward
// from pAllocationRequest->item, making each lost-able allocation lost and
// freeing its suballocation, until itemsToMakeLostCount reaches zero.
// NOTE(review): the failure branch (when MakeLost returns false) and the
// final `return true;` appear to be elided in this extraction — verify
// against the upstream VMA source.
5460 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5461 uint32_t currentFrameIndex,
5462 uint32_t frameInUseCount,
5463 VmaAllocationRequest* pAllocationRequest)
5465 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free suballocations; only used ones can be made lost.
5467 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5469 ++pAllocationRequest->item;
5471 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5472 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5473 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5474 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the surviving iterator.
5476 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5477 --pAllocationRequest->itemsToMakeLostCount;
// On success the request item must now be a free range ready for Alloc().
5485 VMA_HEAVY_ASSERT(Validate());
5486 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5487 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes every lost-able allocation in this block lost (used e.g. when a pool
// forces all its allocations lost). Returns how many were lost.
// NOTE(review): the `else ++it;` advance of the loop iterator appears to be
// elided in this extraction — verify against the upstream VMA source.
5492 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5494 uint32_t lostAllocationCount = 0;
5495 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5496 it != m_Suballocations.end();
// Only used, lost-able allocations whose last-use frame is old enough.
5499 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5500 it->hAllocation->CanBecomeLost() &&
5501 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned node.
5503 it = FreeSuballocation(it);
5504 ++lostAllocationCount;
5507 return lostAllocationCount;
// Commits an allocation into the free suballocation identified by `request`:
// the free range is unregistered, converted to a used suballocation of
// exactly allocSize at request.offset, and any leftover space before/after
// it becomes new free suballocations (re-registered by size).
// NOTE(review): the `++next;` advance before inserting the end padding, the
// paddingEnd/paddingBegin guard conditions, and parts of the final counter
// bookkeeping appear to be elided in this extraction — verify upstream.
5510 void VmaBlockMetadata::Alloc(
5511 const VmaAllocationRequest& request,
5512 VmaSuballocationType type,
5513 VkDeviceSize allocSize,
5514 VmaAllocation hAllocation)
5516 VMA_ASSERT(request.item != m_Suballocations.end());
5517 VmaSuballocation& suballoc = *request.item;
5519 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// The chosen offset may be past the range start due to alignment; the gap
// becomes paddingBegin, the tail becomes paddingEnd.
5521 VMA_ASSERT(request.offset >= suballoc.offset);
5522 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5523 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5524 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove the free range from the sorted-by-size registry before mutating it.
5528 UnregisterFreeSuballocation(request.item);
// Repurpose the node as the used suballocation.
5530 suballoc.offset = request.offset;
5531 suballoc.size = allocSize;
5532 suballoc.type = type;
5533 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new free suballocation.
5538 VmaSuballocation paddingSuballoc = {};
5539 paddingSuballoc.offset = request.offset + allocSize;
5540 paddingSuballoc.size = paddingEnd;
5541 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5542 VmaSuballocationList::iterator next = request.item;
5544 const VmaSuballocationList::iterator paddingEndItem =
5545 m_Suballocations.insert(next, paddingSuballoc);
5546 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation likewise becomes free.
5552 VmaSuballocation paddingSuballoc = {};
5553 paddingSuballoc.offset = request.offset - paddingBegin;
5554 paddingSuballoc.size = paddingBegin;
5555 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5556 const VmaSuballocationList::iterator paddingBeginItem =
5557 m_Suballocations.insert(request.item, paddingSuballoc);
5558 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; paddings (if created) add back to the count.
5562 m_FreeCount = m_FreeCount - 1;
5563 if(paddingBegin > 0)
5571 m_SumFreeSize -= allocSize;
// Frees the suballocation owned by `allocation`, found by linear search over
// the suballocation list. Asserts if the allocation is not in this block.
// NOTE(review): the loop's `++suballocItem` advance and the `return;` after
// a successful free appear to be elided in this extraction — verify
// against the upstream VMA source.
5574 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5576 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5577 suballocItem != m_Suballocations.end();
5580 VmaSuballocation& suballoc = *suballocItem;
5581 if(suballoc.hAllocation == allocation)
// FreeSuballocation converts the node to free and merges neighbors.
5583 FreeSuballocation(suballocItem);
5584 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation does not belong to this block.
5588 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts at `offset`, found by linear search.
// Asserts if no suballocation starts at that offset.
// NOTE(review): the loop's `++suballocItem` advance and the `return;` after
// a successful free appear to be elided in this extraction — verify
// against the upstream VMA source.
5591 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5593 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5594 suballocItem != m_Suballocations.end();
5597 VmaSuballocation& suballoc = *suballocItem;
5598 if(suballoc.offset == offset)
5600 FreeSuballocation(suballocItem);
5604 VMA_ASSERT(0 &&
"Not found!");
5607 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5609 VkDeviceSize lastSize = 0;
5610 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5612 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5614 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5619 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5624 if(it->size < lastSize)
5630 lastSize = it->size;
// Core placement test: starting from suballocItem, decides whether an
// allocation of allocSize/allocAlignment/allocType fits, computing the final
// aligned *pOffset. Two major branches:
//  - canMakeOtherLost: the start item may be used (if lost-able); the
//    algorithm may consume multiple successor items, counting how many must
//    be made lost (*itemsToMakeLostCount) and the bytes sacrificed
//    (*pSumItemSize) vs. reused free space (*pSumFreeSize).
//  - otherwise: the start item must be a single free range large enough.
// Both branches respect VMA_DEBUG_MARGIN and Vulkan bufferImageGranularity
// (buffers and images on the same "page" conflict and force realignment).
// NOTE(review): many lines are elided in this extraction (iterator
// advances, early `return false;` bodies, the `return true;` successes, and
// several call-argument lists) — verify against the upstream VMA source.
5635 bool VmaBlockMetadata::CheckAllocation(
5636 uint32_t currentFrameIndex,
5637 uint32_t frameInUseCount,
5638 VkDeviceSize bufferImageGranularity,
5639 VkDeviceSize allocSize,
5640 VkDeviceSize allocAlignment,
5641 VmaSuballocationType allocType,
5642 VmaSuballocationList::const_iterator suballocItem,
5643 bool canMakeOtherLost,
5644 VkDeviceSize* pOffset,
5645 size_t* itemsToMakeLostCount,
5646 VkDeviceSize* pSumFreeSize,
5647 VkDeviceSize* pSumItemSize)
const 5649 VMA_ASSERT(allocSize > 0);
5650 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5651 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5652 VMA_ASSERT(pOffset != VMA_NULL);
5654 *itemsToMakeLostCount = 0;
// ---- Branch 1: start item may be used, cost accounting required. ----
5658 if(canMakeOtherLost)
5660 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5662 *pSumFreeSize = suballocItem->size;
// A used start item must itself be lost-able and stale enough.
5666 if(suballocItem->hAllocation->CanBecomeLost() &&
5667 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5669 ++*itemsToMakeLostCount;
5670 *pSumItemSize = suballocItem->size;
// Remaining block space from this offset must be able to hold the request.
5679 if(m_Size - suballocItem->offset < allocSize)
5685 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
5688 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5690 *pOffset += VMA_DEBUG_MARGIN;
// Apply the stricter of requested and debug alignment.
5694 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5695 *pOffset = VmaAlignUp(*pOffset, alignment);
// Check predecessors sharing the same granularity page for buffer/image
// type conflicts; on conflict, realign to bufferImageGranularity.
5699 if(bufferImageGranularity > 1)
5701 bool bufferImageGranularityConflict =
false;
5702 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5703 while(prevSuballocItem != m_Suballocations.cbegin())
5706 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5707 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5709 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5711 bufferImageGranularityConflict =
true;
5719 if(bufferImageGranularityConflict)
5721 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed the offset past the start item entirely.
5727 if(*pOffset >= suballocItem->offset + suballocItem->size)
5733 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// Require an end margin unless the allocation reaches the block end.
5736 VmaSuballocationList::const_iterator next = suballocItem;
5738 const VkDeviceSize requiredEndMargin =
5739 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5741 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5743 if(suballocItem->offset + totalSize > m_Size)
// Consume successor items until totalSize is covered, accounting each as
// reusable free space or sacrificed (made-lost) allocation bytes.
5750 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5751 if(totalSize > suballocItem->size)
5753 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5754 while(remainingSize > 0)
5757 if(lastSuballocItem == m_Suballocations.cend())
5761 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5763 *pSumFreeSize += lastSuballocItem->size;
5767 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5768 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5769 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5771 ++*itemsToMakeLostCount;
5772 *pSumItemSize += lastSuballocItem->size;
5779 remainingSize = (lastSuballocItem->size < remainingSize) ?
5780 remainingSize - lastSuballocItem->size : 0;
// Successors on the same granularity page that conflict must also be
// lost-able, since the new allocation's page overlaps them.
5786 if(bufferImageGranularity > 1)
5788 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5790 while(nextSuballocItem != m_Suballocations.cend())
5792 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5793 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5795 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5797 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5798 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5799 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5801 ++*itemsToMakeLostCount;
// ---- Branch 2: start item must be a single sufficient free range. ----
5820 const VmaSuballocation& suballoc = *suballocItem;
5821 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5823 *pSumFreeSize = suballoc.size;
5826 if(suballoc.size < allocSize)
5832 *pOffset = suballoc.offset;
5835 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5837 *pOffset += VMA_DEBUG_MARGIN;
5841 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5842 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same predecessor granularity-conflict handling as in branch 1.
5846 if(bufferImageGranularity > 1)
5848 bool bufferImageGranularityConflict =
false;
5849 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5850 while(prevSuballocItem != m_Suballocations.cbegin())
5853 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5854 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5856 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5858 bufferImageGranularityConflict =
true;
5866 if(bufferImageGranularityConflict)
5868 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5873 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5876 VmaSuballocationList::const_iterator next = suballocItem;
5878 const VkDeviceSize requiredEndMargin =
5879 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Everything (padding + allocation + end margin) must fit this free range.
5882 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting successor on the same page makes this placement infeasible.
5889 if(bufferImageGranularity > 1)
5891 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5893 while(nextSuballocItem != m_Suballocations.cend())
5895 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5896 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5898 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5917 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5919 VMA_ASSERT(item != m_Suballocations.end());
5920 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5922 VmaSuballocationList::iterator nextItem = item;
5924 VMA_ASSERT(nextItem != m_Suballocations.end());
5925 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5927 item->size += nextItem->size;
5929 m_Suballocations.erase(nextItem);
// Converts a used suballocation to free, updates the aggregate counters, and
// merges it with free neighbors on either side. Returns the iterator of the
// surviving (possibly merged) free suballocation, re-registered by size.
// NOTE(review): several lines appear elided in this extraction (the
// ++m_FreeCount update, the `++nextItem;` / `--prevItem;` advances, and the
// branch returning prevItem when merged with the predecessor) — verify
// against the upstream VMA source.
5932 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5935 VmaSuballocation& suballoc = *suballocItem;
5936 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5937 suballoc.hAllocation = VK_NULL_HANDLE;
5941 m_SumFreeSize += suballoc.size;
// Decide whether the successor and/or predecessor are free and mergeable.
5944 bool mergeWithNext =
false;
5945 bool mergeWithPrev =
false;
5947 VmaSuballocationList::iterator nextItem = suballocItem;
5949 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5951 mergeWithNext =
true;
5954 VmaSuballocationList::iterator prevItem = suballocItem;
5955 if(suballocItem != m_Suballocations.begin())
5958 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5960 mergeWithPrev =
true;
// Neighbors must leave the sorted-by-size registry before their size changes.
5966 UnregisterFreeSuballocation(nextItem);
5967 MergeFreeWithNext(suballocItem);
5972 UnregisterFreeSuballocation(prevItem);
5973 MergeFreeWithNext(prevItem);
5974 RegisterFreeSuballocation(prevItem);
// No predecessor merge: register and return the freed node itself.
5979 RegisterFreeSuballocation(suballocItem);
5980 return suballocItem;
// Adds a free suballocation to m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Ranges below the registration threshold are
// intentionally not tracked.
5984 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5986 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5987 VMA_ASSERT(item->size > 0);
// Heavy (opt-in) validation of the sorted registry before mutation.
5991 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5993 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Empty vector: trivial append; otherwise insert at the sorted position.
5995 if(m_FreeSuballocationsBySize.empty())
5997 m_FreeSuballocationsBySize.push_back(item);
6001 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Uses binary
// search to find the first entry of equal size, then scans forward through
// the equal-size run to find the exact iterator.
// NOTE(review): the `return;` after a successful removal and the loop's
// `++index` advance appear to be elided in this extraction — verify
// against the upstream VMA source.
6009 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6011 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6012 VMA_ASSERT(item->size > 0);
6016 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only ranges above the threshold were registered in the first place.
6018 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6020 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6021 m_FreeSuballocationsBySize.data(),
6022 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6024 VmaSuballocationItemSizeLess());
// Linear scan over entries of the same size to find the exact match.
6025 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6026 index < m_FreeSuballocationsBySize.size();
6029 if(m_FreeSuballocationsBySize[index] == item)
6031 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walking past the equal-size run means the item was never registered.
6034 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6036 VMA_ASSERT(0 &&
"Not found.");
6045 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
6047 m_pMappedData(VMA_NULL)
6051 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
6053 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory block, thread-safe via m_Mutex.
// If the memory is already mapped, only the refcount grows and the cached
// pointer is returned; otherwise vkMapMemory is called.
// NOTE(review): the already-mapped branch condition, several vkMapMemory
// arguments, the assignment of m_pMappedData, and the return statements
// appear to be elided in this extraction — verify upstream.
6056 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
6063 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Fast path: already mapped — bump the refcount and hand out the pointer.
6066 m_MapCount += count;
6067 VMA_ASSERT(m_pMappedData != VMA_NULL);
6068 if(ppData != VMA_NULL)
6070 *ppData = m_pMappedData;
// Slow path: perform the actual Vulkan map through the dispatch table.
6076 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6077 hAllocator->m_hDevice,
6083 if(result == VK_SUCCESS)
6085 if(ppData != VMA_NULL)
6087 *ppData = m_pMappedData;
// Reference-counted unmap: decrements the map refcount and calls
// vkUnmapMemory only when it reaches zero. Asserts on unbalanced unmaps.
// NOTE(review): the `if(m_MapCount == 0)` inner condition guarding the
// actual vkUnmapMemory call appears to be elided in this extraction —
// verify against the upstream VMA source.
6095 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
6102 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6103 if(m_MapCount >= count)
6105 m_MapCount -= count;
// Last reference gone: release the mapping.
6108 m_pMappedData = VMA_NULL;
6109 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
6114 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructs an uninitialized block: no memory type, no VkDeviceMemory.
// Real initialization happens later in Init().
6121 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
6122 m_MemoryTypeIndex(UINT32_MAX),
6123 m_hMemory(VK_NULL_HANDLE),
6124 m_Metadata(hAllocator)
6128 void VmaDeviceMemoryBlock::Init(
6129 uint32_t newMemoryTypeIndex,
6130 VkDeviceMemory newMemory,
6131 VkDeviceSize newSize)
6133 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6135 m_MemoryTypeIndex = newMemoryTypeIndex;
6136 m_hMemory = newMemory;
6138 m_Metadata.Init(newSize);
6141 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
6145 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6147 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6148 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6149 m_hMemory = VK_NULL_HANDLE;
6152 bool VmaDeviceMemoryBlock::Validate()
const 6154 if((m_hMemory == VK_NULL_HANDLE) ||
6155 (m_Metadata.GetSize() == 0))
6160 return m_Metadata.Validate();
6163 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
6165 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
6168 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
6170 m_Mapping.Unmap(hAllocator, m_hMemory, count);
6175 memset(&outInfo, 0,
sizeof(outInfo));
6194 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom-pool constructor: forwards the pool's create parameters into the
// embedded block vector.
// NOTE(review): the init-list framing (the m_BlockVector( member target and
// the bufferImageGranularity / isCustomPool arguments) and the constructor
// body appear to be elided in this extraction — verify upstream. The
// destructor's body is likewise not visible here.
6202 VmaPool_T::VmaPool_T(
6203 VmaAllocator hAllocator,
6207 createInfo.memoryTypeIndex,
6208 createInfo.blockSize,
6209 createInfo.minBlockCount,
6210 createInfo.maxBlockCount,
6212 createInfo.frameInUseCount,
6217 VmaPool_T::~VmaPool_T()
// Constructs a vector of memory blocks for one memory type. Used both for
// the allocator's default per-type vectors and for custom pools
// (isCustomPool distinguishes the sizing heuristics in Allocate()). Starts
// with no blocks, no empty-block flag, and no defragmentator.
6221 #if VMA_STATS_STRING_ENABLED 6223 #endif // #if VMA_STATS_STRING_ENABLED 6225 VmaBlockVector::VmaBlockVector(
6226 VmaAllocator hAllocator,
6227 uint32_t memoryTypeIndex,
6228 VkDeviceSize preferredBlockSize,
6229 size_t minBlockCount,
6230 size_t maxBlockCount,
6231 VkDeviceSize bufferImageGranularity,
6232 uint32_t frameInUseCount,
6233 bool isCustomPool) :
6234 m_hAllocator(hAllocator),
6235 m_MemoryTypeIndex(memoryTypeIndex),
6236 m_PreferredBlockSize(preferredBlockSize),
6237 m_MinBlockCount(minBlockCount),
6238 m_MaxBlockCount(maxBlockCount),
6239 m_BufferImageGranularity(bufferImageGranularity),
6240 m_FrameInUseCount(frameInUseCount),
6241 m_IsCustomPool(isCustomPool),
// Block pointers use the allocator's custom allocation callbacks.
6242 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6243 m_HasEmptyBlock(false),
6244 m_pDefragmentator(VMA_NULL)
6248 VmaBlockVector::~VmaBlockVector()
6250 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6252 for(
size_t i = m_Blocks.size(); i--; )
6254 m_Blocks[i]->Destroy(m_hAllocator);
6255 vma_delete(m_hAllocator, m_Blocks[i]);
6259 VkResult VmaBlockVector::CreateMinBlocks()
6261 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6263 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6264 if(res != VK_SUCCESS)
// Fills *pStats with aggregated statistics from every block, under the
// vector's mutex.
// NOTE(review): the initialization of *pStats before accumulation appears
// to be elided in this extraction — verify against the upstream source.
6272 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6280 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6282 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6284 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6286 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block adds its totals into the shared stats structure.
6287 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retries of the make-other-allocations-lost strategy.
6291 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector using a three-stage strategy, all under
// the vector's mutex:
//  1. Try every existing block (fullest-first ordering) without making any
//     allocation lost.
//  2. If allowed and below m_MaxBlockCount, create a new block — for
//     default (non-custom-pool) vectors the preferred size is halved up to
//     NEW_BLOCK_SIZE_SHIFT_MAX times as a heuristic, and halved further on
//     vkAllocateMemory failure.
//  3. If the request allows making other allocations lost, repeatedly pick
//     the cheapest candidate placement across all blocks, make its victims
//     lost, and retry up to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): many lines are elided in this extraction — the createInfo
// parameter and flag checks (mapped / canMakeOtherLost / isUserDataString),
// several call-argument lists, the InitBlockAllocation argument lists, the
// `return VK_SUCCESS;` statements, and the block's closing braces. Verify
// against the upstream VMA source.
6293 VkResult VmaBlockVector::Allocate(
6294 VmaPool hCurrentPool,
6295 uint32_t currentFrameIndex,
6296 const VkMemoryRequirements& vkMemReq,
6298 VmaSuballocationType suballocType,
6299 VmaAllocation* pAllocation)
6304 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Stage 1: try existing blocks without losing anything. ----
6308 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6310 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6311 VMA_ASSERT(pCurrBlock);
6312 VmaAllocationRequest currRequest = {};
6313 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6316 m_BufferImageGranularity,
// canMakeOtherLost=false here, so nothing may need to be made lost.
6324 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map refcount up front.
6328 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6329 if(res != VK_SUCCESS)
// The block is about to hold an allocation, so it is no longer empty.
6336 if(pCurrBlock->m_Metadata.IsEmpty())
6338 m_HasEmptyBlock =
false;
6341 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6342 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6343 (*pAllocation)->InitBlockAllocation(
6352 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6353 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6354 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 2: create a new block if the cap allows it. ----
6359 const bool canCreateNewBlock =
6361 (m_Blocks.size() < m_MaxBlockCount);
6364 if(canCreateNewBlock)
6367 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6368 uint32_t newBlockSizeShift = 0;
6369 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default vectors start small: halve the preferred size while it still
// exceeds the largest existing block and double the request.
6373 if(m_IsCustomPool ==
false)
6376 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6377 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6379 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6380 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6382 newBlockSize = smallerNewBlockSize;
6383 ++newBlockSizeShift;
6392 size_t newBlockIndex = 0;
6393 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure keep halving while the request still fits.
6395 if(m_IsCustomPool ==
false)
6397 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6399 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6400 if(smallerNewBlockSize >= vkMemReq.size)
6402 newBlockSize = smallerNewBlockSize;
6403 ++newBlockSizeShift;
6404 res = CreateBlock(newBlockSize, &newBlockIndex);
6413 if(res == VK_SUCCESS)
6415 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6416 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6420 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6421 if(res != VK_SUCCESS)
// A brand-new block is empty, so the first-allocation fast path applies.
6428 VmaAllocationRequest allocRequest;
6429 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6430 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6431 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6432 (*pAllocation)->InitBlockAllocation(
6435 allocRequest.offset,
6441 VMA_HEAVY_ASSERT(pBlock->Validate());
6442 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6443 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Stage 3: sacrifice lost-able allocations, bounded retries. ----
6451 if(canMakeOtherLost)
6453 uint32_t tryIndex = 0;
6454 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6456 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6457 VmaAllocationRequest bestRequest = {};
6458 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Evaluate a candidate placement in every block and keep the cheapest.
6462 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6464 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6465 VMA_ASSERT(pCurrBlock);
6466 VmaAllocationRequest currRequest = {};
6467 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6470 m_BufferImageGranularity,
6477 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6478 if(pBestRequestBlock == VMA_NULL ||
6479 currRequestCost < bestRequestCost)
6481 pBestRequestBlock = pCurrBlock;
6482 bestRequest = currRequest;
6483 bestRequestCost = currRequestCost;
// Zero cost (nothing sacrificed) cannot be beaten — stop searching.
6485 if(bestRequestCost == 0)
6493 if(pBestRequestBlock != VMA_NULL)
6497 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6498 if(res != VK_SUCCESS)
// Making the victims lost can fail (frame-in-use constraints); on failure
// the outer loop retries with a freshly computed candidate.
6504 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6510 if(pBestRequestBlock->m_Metadata.IsEmpty())
6512 m_HasEmptyBlock =
false;
6515 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6516 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6517 (*pAllocation)->InitBlockAllocation(
6526 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6527 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6528 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Exhausting the retry budget means the losts/allocs are churning.
6542 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6544 return VK_ERROR_TOO_MANY_OBJECTS;
6548 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation back into its block and manages the "keep at most one
// empty block" policy: if the block became empty and another empty block
// already exists (and the minimum count allows it), the block is destroyed;
// destruction happens outside the mutex scope via pBlockToDelete.
// NOTE(review): the vma_delete of hAllocation, the Remove(pBlock) call in
// the delete path, and the scope braces appear to be elided in this
// extraction — verify against the upstream VMA source.
6551 void VmaBlockVector::Free(
6552 VmaAllocation hAllocation)
// Deferred so the VkDeviceMemory free happens outside the mutex.
6554 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6558 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6560 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently-mapped allocations release their map reference first.
6562 if(hAllocation->IsPersistentMap())
6564 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6567 pBlock->m_Metadata.Free(hAllocation);
6568 VMA_HEAVY_ASSERT(pBlock->Validate());
6570 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Empty-block policy: keep at most one empty block alive.
6573 if(pBlock->m_Metadata.IsEmpty())
6576 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6578 pBlockToDelete = pBlock;
6584 m_HasEmptyBlock =
true;
// Block did not become empty, but an older empty block at the back may now
// be redundant.
6589 else if(m_HasEmptyBlock)
6591 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6592 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6594 pBlockToDelete = pLastBlock;
6595 m_Blocks.pop_back();
6596 m_HasEmptyBlock =
false;
// Nudge the vector toward sorted-by-free-space order.
6600 IncrementallySortBlocks();
// Outside the lock: release VkDeviceMemory and the block object.
6605 if(pBlockToDelete != VMA_NULL)
6607 VMA_DEBUG_LOG(
" Deleted empty allocation");
6608 pBlockToDelete->Destroy(m_hAllocator);
6609 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning from the back
// (largest blocks tend to be at the end) and stopping early once a block of
// at least the preferred size is seen.
// NOTE(review): the declaration/initialization of `result`, the `break;`
// body of the early-exit condition, and the final `return result;` appear
// to be elided in this extraction; the upstream return type is
// VkDeviceSize rather than size_t — verify before relying on this text.
6613 size_t VmaBlockVector::CalcMaxBlockSize()
const 6616 for(
size_t i = m_Blocks.size(); i--; )
6618 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6619 if(result >= m_PreferredBlockSize)
6627 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6629 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6631 if(m_Blocks[blockIndex] == pBlock)
6633 VmaVectorRemove(m_Blocks, blockIndex);
6640 void VmaBlockVector::IncrementallySortBlocks()
6643 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6645 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6647 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports its index.
// NOTE(review): the failure-return after AllocateVulkanMemory, the
// pBlock->Init(...) call's leading arguments, and the final
// `return VK_SUCCESS;` appear to be elided in this extraction — verify
// against the upstream VMA source.
6653 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6655 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6656 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6657 allocInfo.allocationSize = blockSize;
6658 VkDeviceMemory mem = VK_NULL_HANDLE;
6659 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw memory in a block object using the custom allocator.
6668 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6672 allocInfo.allocationSize);
6674 m_Blocks.push_back(pBlock);
6675 if(pNewBlockIndex != VMA_NULL)
6677 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes the whole block vector as JSON, under the vector's mutex.
// Custom pools and default vectors emit slightly different headers (block
// count limits and frame-in-use count for pools; preferred block size for
// default vectors), followed by the per-block detailed maps.
// Only compiled when VMA_STATS_STRING_ENABLED.
// NOTE(review): the m_IsCustomPool branch structure, BeginObject/EndObject
// calls, and per-block key writes in the final loop appear to be elided in
// this extraction — verify against the upstream VMA source.
6683 #if VMA_STATS_STRING_ENABLED 6685 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6687 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool header.
6693 json.WriteString(
"MemoryTypeIndex");
6694 json.WriteNumber(m_MemoryTypeIndex);
6696 json.WriteString(
"BlockSize");
6697 json.WriteNumber(m_PreferredBlockSize);
6699 json.WriteString(
"BlockCount");
6700 json.BeginObject(
true);
6701 if(m_MinBlockCount > 0)
6703 json.WriteString(
"Min");
6704 json.WriteNumber((uint64_t)m_MinBlockCount);
6706 if(m_MaxBlockCount < SIZE_MAX)
6708 json.WriteString(
"Max");
6709 json.WriteNumber((uint64_t)m_MaxBlockCount);
6711 json.WriteString(
"Cur");
6712 json.WriteNumber((uint64_t)m_Blocks.size());
6715 if(m_FrameInUseCount > 0)
6717 json.WriteString(
"FrameInUseCount");
6718 json.WriteNumber(m_FrameInUseCount);
// Default-vector header.
6723 json.WriteString(
"PreferredBlockSize");
6724 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps.
6727 json.WriteString(
"Blocks");
6729 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6731 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (and caches) the defragmentator for this block vector.
// NOTE(review): the vma_new constructor-argument list (allocator, this
// vector, frame index in upstream) appears to be elided in this
// extraction — verify against the upstream VMA source.
6738 #endif // #if VMA_STATS_STRING_ENABLED 6740 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6741 VmaAllocator hAllocator,
6742 uint32_t currentFrameIndex)
6744 if(m_pDefragmentator == VMA_NULL)
6746 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6752 return m_pDefragmentator;
// Runs the previously configured defragmentator under the vector's mutex,
// accumulates moved-bytes/moved-allocations into *pDefragmentationStats and
// decrements the remaining budgets, then destroys any blocks that became
// empty (down to m_MinBlockCount), flagging whether one empty block remains.
// NOTE(review): the pDefragmentationStats parameter line, the budget
// decrements after the asserts, and the final `return result;` appear to be
// elided in this extraction — verify against the upstream VMA source.
6755 VkResult VmaBlockVector::Defragment(
6757 VkDeviceSize& maxBytesToMove,
6758 uint32_t& maxAllocationsToMove)
6760 if(m_pDefragmentator == VMA_NULL)
6765 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6768 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report actual movement back to the caller's statistics and budgets.
6771 if(pDefragmentationStats != VMA_NULL)
6773 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6774 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must have respected the caller-imposed budgets.
6777 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6778 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap blocks emptied by the moves, respecting the minimum block count;
// one surviving empty block re-sets m_HasEmptyBlock.
6784 m_HasEmptyBlock =
false;
6785 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6787 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6788 if(pBlock->m_Metadata.IsEmpty())
6790 if(m_Blocks.size() > m_MinBlockCount)
6792 if(pDefragmentationStats != VMA_NULL)
6795 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6798 VmaVectorRemove(m_Blocks, blockIndex);
6799 pBlock->Destroy(m_hAllocator);
6800 vma_delete(m_hAllocator, pBlock);
6804 m_HasEmptyBlock =
true;
6812 void VmaBlockVector::DestroyDefragmentator()
6814 if(m_pDefragmentator != VMA_NULL)
6816 vma_delete(m_hAllocator, m_pDefragmentator);
6817 m_pDefragmentator = VMA_NULL;
6821 void VmaBlockVector::MakePoolAllocationsLost(
6822 uint32_t currentFrameIndex,
6823 size_t* pLostAllocationCount)
6825 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6826 size_t lostAllocationCount = 0;
6827 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6829 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6831 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6833 if(pLostAllocationCount != VMA_NULL)
6835 *pLostAllocationCount = lostAllocationCount;
// Accumulates per-block allocation statistics from this vector into pStats:
// the global total, the entry for this memory type, and the entry for the
// heap that memory type belongs to. Runs under the vector's mutex.
6839 void VmaBlockVector::AddStats(
VmaStats* pStats)
6841 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6842 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6844 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6846 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6848 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6850 VMA_HEAVY_ASSERT(pBlock->Validate());
// NOTE(review): the declaration of allocationStatInfo (a VmaStatInfo local)
// is elided in this extraction — confirm against full source.
6852 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6853 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6854 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6855 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and records the
// frame index used when deciding whether lost allocations may be reclaimed.
// The allocation/block lists use the allocator's own allocation callbacks.
6862 VmaDefragmentator::VmaDefragmentator(
6863 VmaAllocator hAllocator,
6864 VmaBlockVector* pBlockVector,
6865 uint32_t currentFrameIndex) :
6866 m_hAllocator(hAllocator),
6867 m_pBlockVector(pBlockVector),
6868 m_CurrentFrameIndex(currentFrameIndex),
// NOTE(review): an initializer between these two (presumably m_BytesMoved(0))
// is elided in this extraction — confirm against full source.
6870 m_AllocationsMoved(0),
6871 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6872 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6876 VmaDefragmentator::~VmaDefragmentator()
6878 for(
size_t i = m_Blocks.size(); i--; )
6880 vma_delete(m_hAllocator, m_Blocks[i]);
6884 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6886 AllocationInfo allocInfo;
6887 allocInfo.m_hAllocation = hAlloc;
6888 allocInfo.m_pChanged = pChanged;
6889 m_Allocations.push_back(allocInfo);
6892 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6895 if(m_pMappedDataForDefragmentation)
6897 *ppMappedData = m_pMappedDataForDefragmentation;
6902 if(m_pBlock->m_Mapping.GetMappedData())
6904 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6909 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6910 *ppMappedData = m_pMappedDataForDefragmentation;
6914 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6916 if(m_pMappedDataForDefragmentation != VMA_NULL)
6918 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: repeatedly takes the next candidate allocation
// from the back of the block list and tries to move it into an earlier block
// with free space, memcpy-ing the data between mapped pointers, until the
// byte/allocation budgets are hit (VK_INCOMPLETE) or no moves remain.
6922 VkResult VmaDefragmentator::DefragmentRound(
6923 VkDeviceSize maxBytesToMove,
6924 uint32_t maxAllocationsToMove)
6926 if(m_Blocks.empty())
// Start scanning from the last block; SIZE_MAX forces the loop below to pick
// the last allocation of the current block first.
6931 size_t srcBlockIndex = m_Blocks.size() - 1;
6932 size_t srcAllocIndex = SIZE_MAX;
// Find the next source allocation, stepping to earlier blocks when the
// current one has no (more) allocations to move.
6938 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6940 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6943 if(srcBlockIndex == 0)
6950 srcAllocIndex = SIZE_MAX;
6955 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6959 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6960 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Capture the source allocation's placement parameters.
6962 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6963 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6964 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6965 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each destination block no later than the source block.
6968 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6970 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6971 VmaAllocationRequest dstAllocRequest;
6972 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6973 m_CurrentFrameIndex,
6974 m_pBlockVector->GetFrameInUseCount(),
6975 m_pBlockVector->GetBufferImageGranularity(),
// NOTE(review): further CreateAllocationRequest arguments (size/alignment/
// type/canMakeOtherLost) are elided in this extraction.
6980 &dstAllocRequest) &&
// Only move if it actually improves placement (earlier block or offset) —
// the MoveMakesSense(...) call is partially elided here.
6982 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6984 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop the round when either budget would be exceeded by this move.
6987 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6988 (m_BytesMoved + size > maxBytesToMove))
6990 return VK_INCOMPLETE;
// Map both blocks so the data can be copied on the CPU.
6993 void* pDstMappedData = VMA_NULL;
6994 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6995 if(res != VK_SUCCESS)
7000 void* pSrcMappedData = VMA_NULL;
7001 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7002 if(res != VK_SUCCESS)
// Copy the allocation's bytes to its new offset (memcpy call header elided).
7009 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7010 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7011 static_cast<size_t>(size));
// Commit the move in both blocks' metadata and retarget the allocation.
7013 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7014 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7016 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7018 if(allocInfo.m_pChanged != VMA_NULL)
7020 *allocInfo.m_pChanged = VK_TRUE;
7023 ++m_AllocationsMoved;
7024 m_BytesMoved += size;
7026 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next candidate (previous allocation, or previous block).
7034 if(srcAllocIndex > 0)
7040 if(srcBlockIndex > 0)
7043 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block bookkeeping for the
// registered allocations, sorts blocks into a preferred destination order,
// runs up to two DefragmentRound passes, then unmaps anything that was mapped.
7053 VkResult VmaDefragmentator::Defragment(
7054 VkDeviceSize maxBytesToMove,
7055 uint32_t maxAllocationsToMove)
7057 if(m_Allocations.empty())
// Create one BlockInfo per block in the owning vector.
7063 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7064 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7066 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7067 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7068 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be dispatched by binary search.
7072 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Distribute registered allocations into their owning blocks, skipping
// allocations that were already lost.
7075 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7077 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7079 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7081 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7082 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7083 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7085 (*it)->m_Allocations.push_back(allocInfo);
7093 m_Allocations.clear();
// Precompute per-block ordering hints.
7095 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7097 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7098 pBlockInfo->CalcHasNonMovableAllocations();
7099 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so preferred move destinations come first.
7103 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stop early on VK_INCOMPLETE or error.
7106 VkResult result = VK_SUCCESS;
7107 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7109 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings created for the copy operations.
7113 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7115 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7121 bool VmaDefragmentator::MoveMakesSense(
7122 size_t dstBlockIndex, VkDeviceSize dstOffset,
7123 size_t srcBlockIndex, VkDeviceSize srcOffset)
7125 if(dstBlockIndex < srcBlockIndex)
7129 if(dstBlockIndex > srcBlockIndex)
7133 if(dstOffset < srcOffset)
// Allocator constructor (signature elided in this extraction): initializer
// list caches device/physical-device handles and allocation callbacks; the
// body zeroes all cached tables, queries device properties, applies optional
// per-heap size limits, and builds one default block vector plus a dedicated-
// allocation list per memory type.
7146 m_hDevice(pCreateInfo->device),
7147 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7148 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7149 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7150 m_PreferredLargeHeapBlockSize(0),
7151 m_PhysicalDevice(pCreateInfo->physicalDevice),
7152 m_CurrentFrameIndex(0),
7153 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all cached state before it is filled in below.
7157 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7158 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7159 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7161 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7162 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap until overridden below.
7164 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7166 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query and cache device/memory properties through the imported functions.
7177 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7178 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided heap size limits, clamping the reported heap sizes
// so the rest of the allocator respects them. (The null check on
// pHeapSizeLimit is elided in this extraction.)
7185 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7187 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7188 if(limit != VK_WHOLE_SIZE)
7190 m_HeapSizeLimit[heapIndex] = limit;
7191 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7193 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and one dedicated-allocation list per memory type.
7199 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7201 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7203 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
// NOTE(review): most VmaBlockVector constructor arguments are elided here.
7209 GetBufferImageGranularity(),
7214 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7218 VmaAllocator_T::~VmaAllocator_T()
7220 VMA_ASSERT(m_Pools.empty());
7222 for(
size_t i = GetMemoryTypeCount(); i--; )
7224 vma_delete(
this, m_pDedicatedAllocations[i]);
7225 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions. With static linking (VMA_STATIC_VULKAN_FUNCTIONS)
// the global Vulkan entry points are used, plus vkGetDeviceProcAddr lookups
// for the KHR dedicated-allocation pair. Any caller-supplied pointers then
// override the statically obtained ones. Finally every required pointer is
// asserted non-null.
7229 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Statically linked entry points (compile-time option).
7231 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7232 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7233 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7234 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7235 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7236 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7237 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7238 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7239 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7240 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7241 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7242 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7243 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7244 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7245 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points are fetched per-device when the extension is in use.
7246 if(m_UseKhrDedicatedAllocation)
7248 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7249 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7250 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7251 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Caller-supplied pointers take precedence over anything set above.
7253 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7255 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7256 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7258 if(pVulkanFunctions != VMA_NULL)
7260 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7261 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7262 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7263 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7264 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7265 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7266 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7267 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7268 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7269 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7270 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7271 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7272 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7273 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7274 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7275 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Post-condition: every function pointer the allocator relies on is set.
7278 #undef VMA_COPY_IF_NOT_NULL 7282 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7283 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7284 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7285 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7286 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7287 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7288 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7289 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7290 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7291 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7292 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7293 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7294 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7295 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The KHR pair is only mandatory when dedicated allocation is enabled.
7296 if(m_UseKhrDedicatedAllocation)
7298 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7299 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7303 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7305 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7306 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7307 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7308 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: first decides whether a
// dedicated VkDeviceMemory is preferable (large request, explicit flag, or
// debug override), otherwise sub-allocates from the type's default block
// vector, falling back to dedicated memory if block allocation fails.
7311 VkResult VmaAllocator_T::AllocateMemoryOfType(
7312 const VkMemoryRequirements& vkMemReq,
7313 bool dedicatedAllocation,
7314 VkBuffer dedicatedBuffer,
7315 VkImage dedicatedImage,
7317 uint32_t memTypeIndex,
7318 VmaSuballocationType suballocType,
7319 VmaAllocation* pAllocation)
7321 VMA_ASSERT(pAllocation != VMA_NULL);
7322 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// NOTE(review): the surrounding condition is elided — this line appears to
// strip a mapping-related flag when the type is not HOST_VISIBLE.
7328 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7333 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7334 VMA_ASSERT(blockVector);
// Heuristic: requests larger than half the preferred block size (or flagged
// as dedicated) bypass the block vector entirely.
7336 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7337 bool preferDedicatedMemory =
7338 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7339 dedicatedAllocation ||
7341 vkMemReq.size > preferredBlockSize / 2;
7343 if(preferDedicatedMemory &&
7345 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE with a dedicated preference cannot be satisfied.
7354 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7358 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector (arguments elided).
7372 VkResult res = blockVector->Allocate(
7374 m_CurrentFrameIndex.load(),
7379 if(res == VK_SUCCESS)
// Block allocation failed and new blocks are forbidden — give up.
7387 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation before reporting failure.
7391 res = AllocateDedicatedMemory(
7397 finalCreateInfo.pUserData,
7401 if(res == VK_SUCCESS)
7404 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7410 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates a whole VkDeviceMemory object for a single allocation, optionally
// chaining VkMemoryDedicatedAllocateInfoKHR for a specific buffer/image,
// optionally mapping it, then registers it in the per-type dedicated list.
7417 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7419 VmaSuballocationType suballocType,
7420 uint32_t memTypeIndex,
7422 bool isUserDataString,
7424 VkBuffer dedicatedBuffer,
7425 VkImage dedicatedImage,
7426 VmaAllocation* pAllocation)
7428 VMA_ASSERT(pAllocation);
7430 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7431 allocInfo.memoryTypeIndex = memTypeIndex;
7432 allocInfo.allocationSize = size;
// Chain dedicated-allocation info when the KHR extension is enabled and a
// specific buffer or image was supplied (at most one of the two).
7434 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7435 if(m_UseKhrDedicatedAllocation)
7437 if(dedicatedBuffer != VK_NULL_HANDLE)
7439 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7440 dedicatedAllocInfo.buffer = dedicatedBuffer;
7441 allocInfo.pNext = &dedicatedAllocInfo;
7443 else if(dedicatedImage != VK_NULL_HANDLE)
7445 dedicatedAllocInfo.image = dedicatedImage;
7446 allocInfo.pNext = &dedicatedAllocInfo;
7451 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7452 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
// (Failure check elided in this extraction.)
7455 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Map persistently when requested (the map condition is elided); on mapping
// failure the freshly allocated memory is released before returning.
7459 void* pMappedData = VMA_NULL;
7462 res = (*m_VulkanFunctions.vkMapMemory)(
7471 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7472 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation and register it so it is included
// in statistics and validated on destruction.
7477 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7478 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7479 (*pAllocation)->SetUserData(
this, pUserData);
7483 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7484 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7485 VMA_ASSERT(pDedicatedAllocations);
7486 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7489 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is enabled it uses vkGetBufferMemoryRequirements2KHR and also reports
// whether a dedicated allocation is required/preferred; otherwise it falls
// back to the core entry point and reports false for both.
7494 void VmaAllocator_T::GetBufferMemoryRequirements(
7496 VkMemoryRequirements& memReq,
7497 bool& requiresDedicatedAllocation,
7498 bool& prefersDedicatedAllocation)
const 7500 if(m_UseKhrDedicatedAllocation)
7502 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7503 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated flags.
7505 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7507 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7508 memReq2.pNext = &memDedicatedReq;
7510 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7512 memReq = memReq2.memoryRequirements;
7513 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7514 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core fallback: no dedicated-allocation information is available.
7518 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7519 requiresDedicatedAllocation =
false;
7520 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR when
// the extension is enabled, else the core query with both flags false.
7524 void VmaAllocator_T::GetImageMemoryRequirements(
7526 VkMemoryRequirements& memReq,
7527 bool& requiresDedicatedAllocation,
7528 bool& prefersDedicatedAllocation)
const 7530 if(m_UseKhrDedicatedAllocation)
7532 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7533 memReqInfo.image = hImage;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated flags.
7535 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7537 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7538 memReq2.pNext = &memDedicatedReq;
7540 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7542 memReq = memReq2.memoryRequirements;
7543 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7544 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core fallback: no dedicated-allocation information is available.
7548 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7549 requiresDedicatedAllocation =
false;
7550 prefersDedicatedAllocation =
false;
// General allocation entry point: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// candidate memory types (best first), retrying with the next type while
// AllocateMemoryOfType fails.
7554 VkResult VmaAllocator_T::AllocateMemory(
7555 const VkMemoryRequirements& vkMemReq,
7556 bool requiresDedicatedAllocation,
7557 bool prefersDedicatedAllocation,
7558 VkBuffer dedicatedBuffer,
7559 VkImage dedicatedImage,
7561 VmaSuballocationType suballocType,
7562 VmaAllocation* pAllocation)
// Invalid flag combinations are programmer errors (conditions elided here).
7567 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7568 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7573 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7574 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation is incompatible with NEVER_ALLOCATE and
// with custom pools.
7576 if(requiresDedicatedAllocation)
7580 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7581 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7583 if(createInfo.
pool != VK_NULL_HANDLE)
7585 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7586 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7589 if((createInfo.
pool != VK_NULL_HANDLE) &&
7592 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7593 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations go straight to the pool's block vector.
7596 if(createInfo.
pool != VK_NULL_HANDLE)
7598 return createInfo.
pool->m_BlockVector.Allocate(
7600 m_CurrentFrameIndex.load(),
// Otherwise pick the best memory type from vkMemReq.memoryTypeBits
// (vmaFindMemoryTypeIndex call elided), then fall through to weaker types
// while allocation keeps failing.
7609 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7610 uint32_t memTypeIndex = UINT32_MAX;
7612 if(res == VK_SUCCESS)
7614 res = AllocateMemoryOfType(
7616 requiresDedicatedAllocation || prefersDedicatedAllocation,
7624 if(res == VK_SUCCESS)
// Exclude the failed type and search for the next candidate.
7634 memoryTypeBits &= ~(1u << memTypeIndex);
7637 if(res == VK_SUCCESS)
7639 res = AllocateMemoryOfType(
7641 requiresDedicatedAllocation || prefersDedicatedAllocation,
7649 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
7659 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: returns block allocations to their owning block vector
// (default or pool) and dedicated allocations to the device, then destroys
// the VmaAllocation object itself. Allocations already lost skip the
// memory-release step.
7670 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7672 VMA_ASSERT(allocation);
// Only release backing memory if the allocation is not (able to be) lost.
7674 if(allocation->CanBecomeLost() ==
false ||
7675 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7677 switch(allocation->GetType())
7679 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Pool allocations free into the pool's vector, others into the default
// per-memory-type vector.
7681 VmaBlockVector* pBlockVector = VMA_NULL;
7682 VmaPool hPool = allocation->GetPool();
7683 if(hPool != VK_NULL_HANDLE)
7685 pBlockVector = &hPool->m_BlockVector;
7689 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7690 pBlockVector = m_pBlockVectors[memTypeIndex];
7692 pBlockVector->Free(allocation);
7695 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7696 FreeDedicatedMemory(allocation);
// The VmaAllocation object is destroyed in every case.
7703 allocation->SetUserData(
this, VMA_NULL);
7704 vma_delete(
this, allocation);
// Computes global statistics: initializes all VmaStatInfo entries, then
// accumulates from every default block vector, every custom pool, and every
// dedicated allocation, and finally post-processes averages.
7707 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize totals and all per-type/per-heap entries (loop bodies elided).
7710 InitStatInfo(pStats->
total);
7711 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7713 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default pools (one block vector per memory type).
7717 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7719 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7720 VMA_ASSERT(pBlockVector);
7721 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
7726 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7727 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7729 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, guarded by the per-type mutex.
7734 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7736 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7737 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7738 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7739 VMA_ASSERT(pDedicatedAllocVector);
7740 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
// NOTE(review): the declaration of allocationStatInfo is elided here.
7743 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7744 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7745 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7746 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and other post-processed fields.
7751 VmaPostprocessCalcStatInfo(pStats->
total);
7752 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7753 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7754 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7755 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD: 4098 == 0x1002. Presumably used for vendor-specific
// behavior elsewhere in the file — usage not visible in this chunk.
7758 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation: registers each eligible allocation with a
// defragmentator on its owning block vector (default or pool), runs
// Defragment on every HOST_VISIBLE default vector and every pool, then tears
// the defragmentators down.
7760 VkResult VmaAllocator_T::Defragment(
7761 VmaAllocation* pAllocations,
7762 size_t allocationCount,
7763 VkBool32* pAllocationsChanged,
// Zero the caller's output arrays/structs up front.
// NOTE(review): this memset covers sizeof(*pAllocationsChanged) — a single
// element, not allocationCount elements; looks like a pre-existing bug in the
// original, kept as-is here. Confirm against upstream.
7767 if(pAllocationsChanged != VMA_NULL)
7769 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7771 if(pDefragmentationStats != VMA_NULL)
7773 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7776 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7778 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7780 const size_t poolCount = m_Pools.size();
// Dispatch each allocation to the defragmentator of its block vector.
// Eligible: block allocation, HOST_VISIBLE memory type, not lost.
7783 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7785 VmaAllocation hAlloc = pAllocations[allocIndex];
7787 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7789 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7791 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7793 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7795 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7797 const VmaPool hAllocPool = hAlloc->GetPool();
7799 if(hAllocPool != VK_NULL_HANDLE)
7801 pAllocBlockVector = &hAllocPool->GetBlockVector();
7806 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7809 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7811 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7812 &pAllocationsChanged[allocIndex] : VMA_NULL;
7813 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Shared budgets, decremented by each vector's Defragment call.
7817 VkResult result = VK_SUCCESS;
7821 VkDeviceSize maxBytesToMove = SIZE_MAX;
7822 uint32_t maxAllocationsToMove = UINT32_MAX;
7823 if(pDefragmentationInfo != VMA_NULL)
// Defragment default vectors of HOST_VISIBLE memory types first...
7830 for(uint32_t memTypeIndex = 0;
7831 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7835 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7837 result = m_pBlockVectors[memTypeIndex]->Defragment(
7838 pDefragmentationStats,
7840 maxAllocationsToMove);
// ...then every custom pool.
7845 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7847 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7848 pDefragmentationStats,
7850 maxAllocationsToMove);
// Destroy all defragmentators created above.
7856 for(
size_t poolIndex = poolCount; poolIndex--; )
7858 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7862 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7864 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7866 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for an allocation. For allocations that can become
// lost it spins on a compare-exchange of the last-use frame index: a LOST
// allocation reports null memory/offset, an allocation already touched this
// frame reports real data, otherwise the frame index is bumped first.
7873 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7875 if(hAllocation->CanBecomeLost())
// Lock-free loop (loop construct elided): re-read until the CAS succeeds.
7881 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7882 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report size and user data only, with null memory/offset
// (the null assignments are elided in this extraction).
7885 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7889 pAllocationInfo->
offset = 0;
7890 pAllocationInfo->
size = hAllocation->GetSize();
7892 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already marked used in the current frame: report real data.
7895 else if(localLastUseFrameIndex == localCurrFrameIndex)
7897 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7898 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7899 pAllocationInfo->
offset = hAllocation->GetOffset();
7900 pAllocationInfo->
size = hAllocation->GetSize();
7902 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Try to claim the current frame; on failure another thread raced us and
// the loop retries with fresh values.
7907 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7909 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: report everything directly.
7916 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7917 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7918 pAllocationInfo->
offset = hAllocation->GetOffset();
7919 pAllocationInfo->
size = hAllocation->GetSize();
7920 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7921 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame. Returns false if the
// allocation is lost; same compare-exchange loop as GetAllocationInfo but
// without filling an info struct. (Return statements are elided in this
// extraction; presumably false for LOST, true otherwise.)
7925 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
7928 if(hAllocation->CanBecomeLost())
7930 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7931 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7934 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7938 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS retry loop: claim the current frame index or re-read and try again.
7944 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7946 localLastUseFrameIndex = localCurrFrameIndex;
// Creates a custom pool from pCreateInfo (normalization of the create info
// into newCreateInfo is elided in this extraction), pre-creates its minimum
// block count, and registers it in the sorted m_Pools list. On block-creation
// failure the pool object is destroyed and the error returned.
7957 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7959 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7972 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Eagerly create the minimum number of blocks so failure surfaces here.
7974 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7975 if(res != VK_SUCCESS)
7977 vma_delete(
this, *pPool);
// Register the pool (sorted by pointer) under the pools mutex.
7984 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7985 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7991 void VmaAllocator_T::DestroyPool(VmaPool pool)
7995 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7996 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7997 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8000 vma_delete(
this, pool);
8003 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
8005 pool->m_BlockVector.GetPoolStats(pPoolStats);
8008 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8010 m_CurrentFrameIndex.store(frameIndex);
8013 void VmaAllocator_T::MakePoolAllocationsLost(
8015 size_t* pLostAllocationCount)
8017 hPool->m_BlockVector.MakePoolAllocationsLost(
8018 m_CurrentFrameIndex.load(),
8019 pLostAllocationCount);
8022 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
8024 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8025 (*pAllocation)->InitLost();
// Thin wrapper over vkAllocateMemory that enforces the optional per-heap size
// limit (budget decremented under m_HeapSizeLimitMutex) and invokes the
// user's pfnAllocate device-memory callback on success.
8028 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8030 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap has a limit: allocate only if the remaining budget covers the request,
// and charge the budget on success.
8033 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8035 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8036 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8038 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8039 if(res == VK_SUCCESS)
8041 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: fail without calling the driver.
8046 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit configured for this heap: plain vkAllocateMemory.
8051 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user-supplied allocation callback, if any.
8054 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8056 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8062 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8064 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8066 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8069 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8071 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8072 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8074 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8075 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the owning block (ref-counted) and offset the
// returned pointer; dedicated allocations delegate to the allocation object.
8079 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
8081 if(hAllocation->CanBecomeLost())
8083 return VK_ERROR_MEMORY_MAP_FAILED;
8086 switch(hAllocation->GetType())
8088 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8090 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8091 char *pBytes = VMA_NULL;
// Map (or re-use the mapping of) the whole block, count 1 reference.
8092 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8093 if(res == VK_SUCCESS)
// Caller's pointer is the block mapping plus this allocation's offset.
8095 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8096 hAllocation->BlockAllocMap();
8100 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8101 return hAllocation->DedicatedAllocMap(
this, ppData);
// Unknown allocation type (default case elided in this extraction).
8104 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverses VmaAllocator_T::Map: decrements the allocation's map count and
// releases one mapping reference on the owning block, or delegates to the
// dedicated allocation's own unmap.
8108 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
8110 switch(hAllocation->GetType())
8112 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8114 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8115 hAllocation->BlockAllocUnmap();
8116 pBlock->Unmap(
this, 1);
8119 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8120 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: unregisters it from the per-type sorted
// list under that type's mutex, unmaps the memory if it was mapped, and
// returns the VkDeviceMemory to the device via FreeVulkanMemory.
8127 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
8129 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8131 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Unregister under the per-memory-type mutex; the allocation must be present.
8133 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8134 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8135 VMA_ASSERT(pDedicatedAllocations);
8136 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8137 VMA_ASSERT(success);
8140 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistently mapped dedicated memory must be unmapped before freeing.
8142 if(allocation->GetMappedData() != VMA_NULL)
8144 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8147 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8149 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8152 #if VMA_STATS_STRING_ENABLED 8154 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8156 bool dedicatedAllocationsStarted =
false;
8157 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8159 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8160 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8161 VMA_ASSERT(pDedicatedAllocVector);
8162 if(pDedicatedAllocVector->empty() ==
false)
8164 if(dedicatedAllocationsStarted ==
false)
8166 dedicatedAllocationsStarted =
true;
8167 json.WriteString(
"DedicatedAllocations");
8171 json.BeginString(
"Type ");
8172 json.ContinueString(memTypeIndex);
8177 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8179 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8180 json.BeginObject(
true);
8182 json.WriteString(
"Type");
8183 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8185 json.WriteString(
"Size");
8186 json.WriteNumber(hAlloc->GetSize());
8188 const void* pUserData = hAlloc->GetUserData();
8189 if(pUserData != VMA_NULL)
8191 json.WriteString(
"UserData");
8192 if(hAlloc->IsUserDataString())
8194 json.WriteString((
const char*)pUserData);
8199 json.ContinueString_Pointer(pUserData);
8210 if(dedicatedAllocationsStarted)
8216 bool allocationsStarted =
false;
8217 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8219 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8221 if(allocationsStarted ==
false)
8223 allocationsStarted =
true;
8224 json.WriteString(
"DefaultPools");
8228 json.BeginString(
"Type ");
8229 json.ContinueString(memTypeIndex);
8232 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8235 if(allocationsStarted)
8242 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8243 const size_t poolCount = m_Pools.size();
8246 json.WriteString(
"Pools");
8248 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8250 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8257 #endif // #if VMA_STATS_STRING_ENABLED 8259 static VkResult AllocateMemoryForImage(
8260 VmaAllocator allocator,
8263 VmaSuballocationType suballocType,
8264 VmaAllocation* pAllocation)
8266 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8268 VkMemoryRequirements vkMemReq = {};
8269 bool requiresDedicatedAllocation =
false;
8270 bool prefersDedicatedAllocation =
false;
8271 allocator->GetImageMemoryRequirements(image, vkMemReq,
8272 requiresDedicatedAllocation, prefersDedicatedAllocation);
8274 return allocator->AllocateMemory(
8276 requiresDedicatedAllocation,
8277 prefersDedicatedAllocation,
8280 *pAllocationCreateInfo,
8290 VmaAllocator* pAllocator)
8292 VMA_ASSERT(pCreateInfo && pAllocator);
8293 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8299 VmaAllocator allocator)
8301 if(allocator != VK_NULL_HANDLE)
8303 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8304 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8305 vma_delete(&allocationCallbacks, allocator);
8310 VmaAllocator allocator,
8311 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8313 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8314 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8318 VmaAllocator allocator,
8319 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8321 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8322 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8326 VmaAllocator allocator,
8327 uint32_t memoryTypeIndex,
8328 VkMemoryPropertyFlags* pFlags)
8330 VMA_ASSERT(allocator && pFlags);
8331 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8332 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8336 VmaAllocator allocator,
8337 uint32_t frameIndex)
8339 VMA_ASSERT(allocator);
8340 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8342 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8344 allocator->SetCurrentFrameIndex(frameIndex);
8348 VmaAllocator allocator,
8351 VMA_ASSERT(allocator && pStats);
8352 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8353 allocator->CalculateStats(pStats);
8356 #if VMA_STATS_STRING_ENABLED 8359 VmaAllocator allocator,
8360 char** ppStatsString,
8361 VkBool32 detailedMap)
8363 VMA_ASSERT(allocator && ppStatsString);
8364 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8366 VmaStringBuilder sb(allocator);
8368 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8372 allocator->CalculateStats(&stats);
8374 json.WriteString(
"Total");
8375 VmaPrintStatInfo(json, stats.
total);
8377 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8379 json.BeginString(
"Heap ");
8380 json.ContinueString(heapIndex);
8384 json.WriteString(
"Size");
8385 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8387 json.WriteString(
"Flags");
8388 json.BeginArray(
true);
8389 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8391 json.WriteString(
"DEVICE_LOCAL");
8397 json.WriteString(
"Stats");
8398 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8401 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8403 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8405 json.BeginString(
"Type ");
8406 json.ContinueString(typeIndex);
8411 json.WriteString(
"Flags");
8412 json.BeginArray(
true);
8413 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8414 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8416 json.WriteString(
"DEVICE_LOCAL");
8418 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8420 json.WriteString(
"HOST_VISIBLE");
8422 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8424 json.WriteString(
"HOST_COHERENT");
8426 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8428 json.WriteString(
"HOST_CACHED");
8430 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8432 json.WriteString(
"LAZILY_ALLOCATED");
8438 json.WriteString(
"Stats");
8439 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8448 if(detailedMap == VK_TRUE)
8450 allocator->PrintDetailedMap(json);
8456 const size_t len = sb.GetLength();
8457 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8460 memcpy(pChars, sb.GetData(), len);
8463 *ppStatsString = pChars;
8467 VmaAllocator allocator,
8470 if(pStatsString != VMA_NULL)
8472 VMA_ASSERT(allocator);
8473 size_t len = strlen(pStatsString);
8474 vma_delete_array(allocator, pStatsString, len + 1);
8478 #endif // #if VMA_STATS_STRING_ENABLED 8484 VmaAllocator allocator,
8485 uint32_t memoryTypeBits,
8487 uint32_t* pMemoryTypeIndex)
8489 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8490 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8491 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8498 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8502 switch(pAllocationCreateInfo->
usage)
8507 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8510 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8513 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8514 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8517 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8518 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8524 *pMemoryTypeIndex = UINT32_MAX;
8525 uint32_t minCost = UINT32_MAX;
8526 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8527 memTypeIndex < allocator->GetMemoryTypeCount();
8528 ++memTypeIndex, memTypeBit <<= 1)
8531 if((memTypeBit & memoryTypeBits) != 0)
8533 const VkMemoryPropertyFlags currFlags =
8534 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8536 if((requiredFlags & ~currFlags) == 0)
8539 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8541 if(currCost < minCost)
8543 *pMemoryTypeIndex = memTypeIndex;
8553 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8557 VmaAllocator allocator,
8558 const VkBufferCreateInfo* pBufferCreateInfo,
8560 uint32_t* pMemoryTypeIndex)
8562 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8563 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8564 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8565 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8567 const VkDevice hDev = allocator->m_hDevice;
8568 VkBuffer hBuffer = VK_NULL_HANDLE;
8569 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8570 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8571 if(res == VK_SUCCESS)
8573 VkMemoryRequirements memReq = {};
8574 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8575 hDev, hBuffer, &memReq);
8579 memReq.memoryTypeBits,
8580 pAllocationCreateInfo,
8583 allocator->GetVulkanFunctions().vkDestroyBuffer(
8584 hDev, hBuffer, allocator->GetAllocationCallbacks());
8590 VmaAllocator allocator,
8591 const VkImageCreateInfo* pImageCreateInfo,
8593 uint32_t* pMemoryTypeIndex)
8595 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8596 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8597 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8598 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8600 const VkDevice hDev = allocator->m_hDevice;
8601 VkImage hImage = VK_NULL_HANDLE;
8602 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8603 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8604 if(res == VK_SUCCESS)
8606 VkMemoryRequirements memReq = {};
8607 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8608 hDev, hImage, &memReq);
8612 memReq.memoryTypeBits,
8613 pAllocationCreateInfo,
8616 allocator->GetVulkanFunctions().vkDestroyImage(
8617 hDev, hImage, allocator->GetAllocationCallbacks());
8623 VmaAllocator allocator,
8627 VMA_ASSERT(allocator && pCreateInfo && pPool);
8629 VMA_DEBUG_LOG(
"vmaCreatePool");
8631 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8633 return allocator->CreatePool(pCreateInfo, pPool);
8637 VmaAllocator allocator,
8640 VMA_ASSERT(allocator);
8642 if(pool == VK_NULL_HANDLE)
8647 VMA_DEBUG_LOG(
"vmaDestroyPool");
8649 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8651 allocator->DestroyPool(pool);
8655 VmaAllocator allocator,
8659 VMA_ASSERT(allocator && pool && pPoolStats);
8661 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8663 allocator->GetPoolStats(pool, pPoolStats);
8667 VmaAllocator allocator,
8669 size_t* pLostAllocationCount)
8671 VMA_ASSERT(allocator && pool);
8673 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8675 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8679 VmaAllocator allocator,
8680 const VkMemoryRequirements* pVkMemoryRequirements,
8682 VmaAllocation* pAllocation,
8685 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8687 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8689 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8691 VkResult result = allocator->AllocateMemory(
8692 *pVkMemoryRequirements,
8698 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8701 if(pAllocationInfo && result == VK_SUCCESS)
8703 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8710 VmaAllocator allocator,
8713 VmaAllocation* pAllocation,
8716 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8718 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8720 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8722 VkMemoryRequirements vkMemReq = {};
8723 bool requiresDedicatedAllocation =
false;
8724 bool prefersDedicatedAllocation =
false;
8725 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8726 requiresDedicatedAllocation,
8727 prefersDedicatedAllocation);
8729 VkResult result = allocator->AllocateMemory(
8731 requiresDedicatedAllocation,
8732 prefersDedicatedAllocation,
8736 VMA_SUBALLOCATION_TYPE_BUFFER,
8739 if(pAllocationInfo && result == VK_SUCCESS)
8741 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8748 VmaAllocator allocator,
8751 VmaAllocation* pAllocation,
8754 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8756 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8758 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8760 VkResult result = AllocateMemoryForImage(
8764 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8767 if(pAllocationInfo && result == VK_SUCCESS)
8769 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8776 VmaAllocator allocator,
8777 VmaAllocation allocation)
8779 VMA_ASSERT(allocator && allocation);
8781 VMA_DEBUG_LOG(
"vmaFreeMemory");
8783 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8785 allocator->FreeMemory(allocation);
8789 VmaAllocator allocator,
8790 VmaAllocation allocation,
8793 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8795 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8797 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8801 VmaAllocator allocator,
8802 VmaAllocation allocation)
8804 VMA_ASSERT(allocator && allocation);
8806 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8808 return allocator->TouchAllocation(allocation);
8812 VmaAllocator allocator,
8813 VmaAllocation allocation,
8816 VMA_ASSERT(allocator && allocation);
8818 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8820 allocation->SetUserData(allocator, pUserData);
8824 VmaAllocator allocator,
8825 VmaAllocation* pAllocation)
8827 VMA_ASSERT(allocator && pAllocation);
8829 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8831 allocator->CreateLostAllocation(pAllocation);
8835 VmaAllocator allocator,
8836 VmaAllocation allocation,
8839 VMA_ASSERT(allocator && allocation && ppData);
8841 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8843 return allocator->Map(allocation, ppData);
8847 VmaAllocator allocator,
8848 VmaAllocation allocation)
8850 VMA_ASSERT(allocator && allocation);
8852 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8854 allocator->Unmap(allocation);
8858 VmaAllocator allocator,
8859 VmaAllocation* pAllocations,
8860 size_t allocationCount,
8861 VkBool32* pAllocationsChanged,
8865 VMA_ASSERT(allocator && pAllocations);
8867 VMA_DEBUG_LOG(
"vmaDefragment");
8869 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8871 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8875 VmaAllocator allocator,
8876 const VkBufferCreateInfo* pBufferCreateInfo,
8879 VmaAllocation* pAllocation,
8882 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8884 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8886 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8888 *pBuffer = VK_NULL_HANDLE;
8889 *pAllocation = VK_NULL_HANDLE;
8892 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8893 allocator->m_hDevice,
8895 allocator->GetAllocationCallbacks(),
8900 VkMemoryRequirements vkMemReq = {};
8901 bool requiresDedicatedAllocation =
false;
8902 bool prefersDedicatedAllocation =
false;
8903 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8904 requiresDedicatedAllocation, prefersDedicatedAllocation);
8908 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8910 VMA_ASSERT(vkMemReq.alignment %
8911 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8913 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8915 VMA_ASSERT(vkMemReq.alignment %
8916 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8918 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8920 VMA_ASSERT(vkMemReq.alignment %
8921 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8925 res = allocator->AllocateMemory(
8927 requiresDedicatedAllocation,
8928 prefersDedicatedAllocation,
8931 *pAllocationCreateInfo,
8932 VMA_SUBALLOCATION_TYPE_BUFFER,
8937 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8938 allocator->m_hDevice,
8940 (*pAllocation)->GetMemory(),
8941 (*pAllocation)->GetOffset());
8945 if(pAllocationInfo != VMA_NULL)
8947 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8951 allocator->FreeMemory(*pAllocation);
8952 *pAllocation = VK_NULL_HANDLE;
8953 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8954 *pBuffer = VK_NULL_HANDLE;
8957 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8958 *pBuffer = VK_NULL_HANDLE;
8965 VmaAllocator allocator,
8967 VmaAllocation allocation)
8969 if(buffer != VK_NULL_HANDLE)
8971 VMA_ASSERT(allocator);
8973 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8975 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8977 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8979 allocator->FreeMemory(allocation);
8984 VmaAllocator allocator,
8985 const VkImageCreateInfo* pImageCreateInfo,
8988 VmaAllocation* pAllocation,
8991 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8993 VMA_DEBUG_LOG(
"vmaCreateImage");
8995 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8997 *pImage = VK_NULL_HANDLE;
8998 *pAllocation = VK_NULL_HANDLE;
9001 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9002 allocator->m_hDevice,
9004 allocator->GetAllocationCallbacks(),
9008 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9009 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9010 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9013 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9017 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
9018 allocator->m_hDevice,
9020 (*pAllocation)->GetMemory(),
9021 (*pAllocation)->GetOffset());
9025 if(pAllocationInfo != VMA_NULL)
9027 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9031 allocator->FreeMemory(*pAllocation);
9032 *pAllocation = VK_NULL_HANDLE;
9033 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9034 *pImage = VK_NULL_HANDLE;
9037 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9038 *pImage = VK_NULL_HANDLE;
9045 VmaAllocator allocator,
9047 VmaAllocation allocation)
9049 if(image != VK_NULL_HANDLE)
9051 VMA_ASSERT(allocator);
9053 VMA_DEBUG_LOG(
"vmaDestroyImage");
9055 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9057 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9059 allocator->FreeMemory(allocation);
9063 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1004
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1258
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1029
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1014
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1215
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1008
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1564
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1026
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1763
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1434
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1488
Definition: vk_mem_alloc.h:1295
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:997
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1333
Definition: vk_mem_alloc.h:1242
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1038
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1091
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1023
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1246
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1156
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1011
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1155
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1019
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1767
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1055
VmaStatInfo total
Definition: vk_mem_alloc.h:1165
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1775
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1317
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1758
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1012
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:939
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1032
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1442
Definition: vk_mem_alloc.h:1436
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1574
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1009
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1354
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1458
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1494
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:995
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1445
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1193
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1753
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1771
Definition: vk_mem_alloc.h:1232
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1341
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1010
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1161
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:945
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:966
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:971
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1773
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1328
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1504
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1005
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1144
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1453
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:958
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1302
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1157
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:962
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1448
Definition: vk_mem_alloc.h:1241
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1323
Definition: vk_mem_alloc.h:1314
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1147
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1007
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1466
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1041
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1497
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1312
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1347
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1079
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1163
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1282
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1156
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1016
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:960
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1015
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1480
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1588
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1035
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1156
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1153
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameters of an existing VmaPool.
Definition: vk_mem_alloc.h:1485
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1569
Definition: vk_mem_alloc.h:1310
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1769
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1003
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1018
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1151
Definition: vk_mem_alloc.h:1198
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1438
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1149
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1013
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1017
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1269
Definition: vk_mem_alloc.h:1225
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1583
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:993
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1006
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1550
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1416
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1157
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1308
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1164
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1491
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1157
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1555