23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 873 #include <vulkan/vulkan.h> 875 VK_DEFINE_HANDLE(VmaAllocator)
879 VmaAllocator allocator,
881 VkDeviceMemory memory,
885 VmaAllocator allocator,
887 VkDeviceMemory memory,
1036 VmaAllocator* pAllocator);
1040 VmaAllocator allocator);
1047 VmaAllocator allocator,
1048 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1055 VmaAllocator allocator,
1056 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1065 VmaAllocator allocator,
1066 uint32_t memoryTypeIndex,
1067 VkMemoryPropertyFlags* pFlags);
1078 VmaAllocator allocator,
1079 uint32_t frameIndex);
1109 VmaAllocator allocator,
1112 #define VMA_STATS_STRING_ENABLED 1 1114 #if VMA_STATS_STRING_ENABLED 1120 VmaAllocator allocator,
1121 char** ppStatsString,
1122 VkBool32 detailedMap);
1125 VmaAllocator allocator,
1126 char* pStatsString);
1128 #endif // #if VMA_STATS_STRING_ENABLED 1130 VK_DEFINE_HANDLE(VmaPool)
1313 VmaAllocator allocator,
1314 uint32_t memoryTypeBits,
1316 uint32_t* pMemoryTypeIndex);
1331 VmaAllocator allocator,
1332 const VkBufferCreateInfo* pBufferCreateInfo,
1334 uint32_t* pMemoryTypeIndex);
1349 VmaAllocator allocator,
1350 const VkImageCreateInfo* pImageCreateInfo,
1352 uint32_t* pMemoryTypeIndex);
1453 VmaAllocator allocator,
1460 VmaAllocator allocator,
1470 VmaAllocator allocator,
1481 VmaAllocator allocator,
1483 size_t* pLostAllocationCount);
1485 VK_DEFINE_HANDLE(VmaAllocation)
1541 VmaAllocator allocator,
1542 const VkMemoryRequirements* pVkMemoryRequirements,
1544 VmaAllocation* pAllocation,
1554 VmaAllocator allocator,
1557 VmaAllocation* pAllocation,
1562 VmaAllocator allocator,
1565 VmaAllocation* pAllocation,
1570 VmaAllocator allocator,
1571 VmaAllocation allocation);
1578 VmaAllocator allocator,
1579 VmaAllocation allocation,
1585 VmaAllocator allocator,
1586 VmaAllocation allocation);
1602 VmaAllocator allocator,
1603 VmaAllocation allocation,
1617 VmaAllocator allocator,
1618 VmaAllocation* pAllocation);
1655 VmaAllocator allocator,
1656 VmaAllocation allocation,
1664 VmaAllocator allocator,
1665 VmaAllocation allocation);
1776 VmaAllocator allocator,
1777 VmaAllocation* pAllocations,
1778 size_t allocationCount,
1779 VkBool32* pAllocationsChanged,
1810 VmaAllocator allocator,
1811 const VkBufferCreateInfo* pBufferCreateInfo,
1814 VmaAllocation* pAllocation,
1829 VmaAllocator allocator,
1831 VmaAllocation allocation);
1835 VmaAllocator allocator,
1836 const VkImageCreateInfo* pImageCreateInfo,
1839 VmaAllocation* pAllocation,
1854 VmaAllocator allocator,
1856 VmaAllocation allocation);
1862 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1865 #ifdef __INTELLISENSE__ 1866 #define VMA_IMPLEMENTATION 1869 #ifdef VMA_IMPLEMENTATION 1870 #undef VMA_IMPLEMENTATION 1892 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1893 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1905 #if VMA_USE_STL_CONTAINERS 1906 #define VMA_USE_STL_VECTOR 1 1907 #define VMA_USE_STL_UNORDERED_MAP 1 1908 #define VMA_USE_STL_LIST 1 1911 #if VMA_USE_STL_VECTOR 1915 #if VMA_USE_STL_UNORDERED_MAP 1916 #include <unordered_map> 1919 #if VMA_USE_STL_LIST 1928 #include <algorithm> 1932 #if !defined(_WIN32) && !defined(__APPLE__) 1938 #define VMA_NULL nullptr 1941 #if defined(__APPLE__) || defined(__ANDROID__) 1943 void *aligned_alloc(
size_t alignment,
size_t size)
1946 if(alignment <
sizeof(
void*))
1948 alignment =
sizeof(
void*);
1952 if(posix_memalign(&pointer, alignment, size) == 0)
1961 #define VMA_ASSERT(expr) assert(expr) 1963 #define VMA_ASSERT(expr) 1969 #ifndef VMA_HEAVY_ASSERT 1971 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1973 #define VMA_HEAVY_ASSERT(expr) 1977 #ifndef VMA_ALIGN_OF 1978 #define VMA_ALIGN_OF(type) (__alignof(type)) 1981 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1983 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1985 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1989 #ifndef VMA_SYSTEM_FREE 1991 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1993 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1998 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2002 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2006 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2010 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2013 #ifndef VMA_DEBUG_LOG 2014 #define VMA_DEBUG_LOG(format, ...) 2024 #if VMA_STATS_STRING_ENABLED 2025 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2027 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of `num` into outStr (capacity strLen).
// snprintf guarantees NUL-termination whenever strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    // Widen explicitly so the value always matches the %llu specifier.
    const unsigned long long printable = num;
    snprintf(outStr, strLen, "%llu", printable);
}
// Writes the textual form of pointer `ptr` into outStr (capacity strLen).
// The exact rendering of %p is implementation-defined.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
// Thin wrapper over std::mutex exposing the Lock()/Unlock() names the rest of
// the library uses, so the mutex type can be swapped via the VMA_MUTEX macro.
// NOTE(review): class frame reconstructed around the surviving Lock/Unlock
// bodies -- verify member layout against upstream VMA.
class VmaMutex
{
public:
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
2050 #define VMA_MUTEX VmaMutex 2061 #ifndef VMA_ATOMIC_UINT32 2062 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2065 #ifndef VMA_BEST_FIT 2078 #define VMA_BEST_FIT (1) 2081 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2086 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2089 #ifndef VMA_DEBUG_ALIGNMENT 2094 #define VMA_DEBUG_ALIGNMENT (1) 2097 #ifndef VMA_DEBUG_MARGIN 2102 #define VMA_DEBUG_MARGIN (0) 2105 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2110 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2113 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2118 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2121 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2122 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2126 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2127 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2131 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null VkAllocationCallbacks: every member (pUserData plus the five
// callback pointers) is VMA_NULL, meaning "no user callbacks supplied".
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count) using the
// classic branch-free SWAR reduction.
// Defect fixed in this revision: the final `return c;` was missing, so the
// computed count was never returned.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);        // pairwise 2-bit sums
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);  // 4-bit sums
    c = ((c >> 4) + c) & 0x0F0F0F0F;                 // 8-bit sums
    c = ((c >> 8) + c) & 0x00FF00FF;                 // 16-bit sums
    c = ((c >> 16) + c) & 0x0000FFFF;                // full 32-bit sum
    return c;
}
// Rounds `val` up to the nearest multiple of `align` using integer division.
// Requires align > 0; exact for unsigned/integral T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T buckets = (val + align - 1) / align;
    return buckets * align;
}
// Integer division of x by y with rounding to nearest (half rounds up).
// Intended for non-negative integral operands; y must be non-zero.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Partition step for VmaQuickSort (Lomuto scheme): uses *(end-1) as the pivot,
// moves every element for which cmp(elem, pivot) holds in front of it, puts the
// pivot into its final position and returns an iterator to the pivot.
// Defects fixed in this revision: the `++insertIndex;` advance and the final
// `return insertIndex;` were missing, making the partition incorrect and the
// function return nothing.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex; // grow the "less than pivot" prefix
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue); // drop the pivot into place
    }
    return insertIndex;
}
2191 template<
typename Iterator,
typename Compare>
2192 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2196 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2197 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2198 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2202 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2204 #endif // #ifndef VMA_SORT 2213 static inline bool VmaBlocksOnSamePage(
2214 VkDeviceSize resourceAOffset,
2215 VkDeviceSize resourceASize,
2216 VkDeviceSize resourceBOffset,
2217 VkDeviceSize pageSize)
2219 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2220 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2221 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2222 VkDeviceSize resourceBStart = resourceBOffset;
2223 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2224 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. The numeric ordering is
// significant: VmaIsBufferImageGranularityConflict() swaps its arguments so
// the smaller enum value comes first before switching on it.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,             // unoccupied region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,          // allocated without resource info
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,    // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // force 32-bit underlying type
};
2244 static inline bool VmaIsBufferImageGranularityConflict(
2245 VmaSuballocationType suballocType1,
2246 VmaSuballocationType suballocType2)
2248 if(suballocType1 > suballocType2)
2250 VMA_SWAP(suballocType1, suballocType2);
2253 switch(suballocType1)
2255 case VMA_SUBALLOCATION_TYPE_FREE:
2257 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2259 case VMA_SUBALLOCATION_TYPE_BUFFER:
2261 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2262 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2263 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2265 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2266 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2267 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2268 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2270 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2271 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2283 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2284 m_pMutex(useMutex ? &mutex : VMA_NULL)
2301 VMA_MUTEX* m_pMutex;
2304 #if VMA_DEBUG_GLOBAL_MUTEX 2305 static VMA_MUTEX gDebugGlobalMutex;
2306 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2308 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2312 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element that is NOT less than `key` (same contract as
// std::lower_bound), or `end` when all elements compare less.
// Defect fixed in this revision: the while-loop frame, the else-branch and the
// final return were missing, leaving only the first comparison.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1; // *mid < key: answer is strictly above mid
        }
        else
        {
            up = mid;       // *mid >= key: mid is still a candidate
        }
    }
    return beg + down;
}
2345 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2347 if((pAllocationCallbacks != VMA_NULL) &&
2348 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2350 return (*pAllocationCallbacks->pfnAllocation)(
2351 pAllocationCallbacks->pUserData,
2354 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2358 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2362 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2364 if((pAllocationCallbacks != VMA_NULL) &&
2365 (pAllocationCallbacks->pfnFree != VMA_NULL))
2367 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2371 VMA_SYSTEM_FREE(ptr);
2375 template<
typename T>
2376 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2378 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2381 template<
typename T>
2382 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2384 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2387 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2389 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2391 template<
typename T>
2392 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2395 VmaFree(pAllocationCallbacks, ptr);
2398 template<
typename T>
2399 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2403 for(
size_t i = count; i--; )
2407 VmaFree(pAllocationCallbacks, ptr);
// Minimal STL-compatible allocator that routes all allocation traffic through
// Vulkan-style VkAllocationCallbacks (falling back to the system allocator
// inside VmaMalloc/VmaFree when the callbacks are null). Used for the
// library's internal containers.
template<typename T>
class VmaStlAllocator
{
public:
    // Immutable after construction; may be null (system allocator is used).
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    // Rebinding copy constructor required by the C++ Allocator concept.
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    // Two instances are interchangeable iff they share the same callbacks.
    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

// Inserts `item` at position `index` of a std::vector. Free-function adapter
// so the same call-site syntax works whether VmaVector is std::vector or the
// library's own vector type.
template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}
// Removes the element at position `index` from a std::vector. Free-function
// adapter used by the library's sorted-vector helpers.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
2455 #else // #if VMA_USE_STL_VECTOR 2460 template<
typename T,
typename AllocatorT>
2464 typedef T value_type;
2466 VmaVector(
const AllocatorT& allocator) :
2467 m_Allocator(allocator),
2474 VmaVector(
size_t count,
const AllocatorT& allocator) :
2475 m_Allocator(allocator),
2476 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2482 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2483 m_Allocator(src.m_Allocator),
2484 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2485 m_Count(src.m_Count),
2486 m_Capacity(src.m_Count)
2490 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2496 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2499 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2503 resize(rhs.m_Count);
2506 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2512 bool empty()
const {
return m_Count == 0; }
2513 size_t size()
const {
return m_Count; }
2514 T* data() {
return m_pArray; }
2515 const T* data()
const {
return m_pArray; }
2517 T& operator[](
size_t index)
2519 VMA_HEAVY_ASSERT(index < m_Count);
2520 return m_pArray[index];
2522 const T& operator[](
size_t index)
const 2524 VMA_HEAVY_ASSERT(index < m_Count);
2525 return m_pArray[index];
2530 VMA_HEAVY_ASSERT(m_Count > 0);
2533 const T& front()
const 2535 VMA_HEAVY_ASSERT(m_Count > 0);
2540 VMA_HEAVY_ASSERT(m_Count > 0);
2541 return m_pArray[m_Count - 1];
2543 const T& back()
const 2545 VMA_HEAVY_ASSERT(m_Count > 0);
2546 return m_pArray[m_Count - 1];
2549 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2551 newCapacity = VMA_MAX(newCapacity, m_Count);
2553 if((newCapacity < m_Capacity) && !freeMemory)
2555 newCapacity = m_Capacity;
2558 if(newCapacity != m_Capacity)
2560 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2563 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2565 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2566 m_Capacity = newCapacity;
2567 m_pArray = newArray;
2571 void resize(
size_t newCount,
bool freeMemory =
false)
2573 size_t newCapacity = m_Capacity;
2574 if(newCount > m_Capacity)
2576 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2580 newCapacity = newCount;
2583 if(newCapacity != m_Capacity)
2585 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2586 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2587 if(elementsToCopy != 0)
2589 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2591 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2592 m_Capacity = newCapacity;
2593 m_pArray = newArray;
2599 void clear(
bool freeMemory =
false)
2601 resize(0, freeMemory);
2604 void insert(
size_t index,
const T& src)
2606 VMA_HEAVY_ASSERT(index <= m_Count);
2607 const size_t oldCount = size();
2608 resize(oldCount + 1);
2609 if(index < oldCount)
2611 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2613 m_pArray[index] = src;
2616 void remove(
size_t index)
2618 VMA_HEAVY_ASSERT(index < m_Count);
2619 const size_t oldCount = size();
2620 if(index < oldCount - 1)
2622 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2624 resize(oldCount - 1);
2627 void push_back(
const T& src)
2629 const size_t newIndex = size();
2630 resize(newIndex + 1);
2631 m_pArray[newIndex] = src;
2636 VMA_HEAVY_ASSERT(m_Count > 0);
2640 void push_front(
const T& src)
2647 VMA_HEAVY_ASSERT(m_Count > 0);
2651 typedef T* iterator;
2653 iterator begin() {
return m_pArray; }
2654 iterator end() {
return m_pArray + m_Count; }
2657 AllocatorT m_Allocator;
2663 template<
typename T,
typename allocatorT>
2664 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2666 vec.insert(index, item);
2669 template<
typename T,
typename allocatorT>
2670 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2675 #endif // #if VMA_USE_STL_VECTOR 2677 template<
typename CmpLess,
typename VectorT>
2678 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2680 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2682 vector.data() + vector.size(),
2684 CmpLess()) - vector.data();
2685 VmaVectorInsert(vector, indexToInsert, value);
2686 return indexToInsert;
2689 template<
typename CmpLess,
typename VectorT>
2690 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2693 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2698 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2700 size_t indexToRemove = it - vector.begin();
2701 VmaVectorRemove(vector, indexToRemove);
2707 template<
typename CmpLess,
typename VectorT>
2708 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2711 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2713 vector.data() + vector.size(),
2716 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2718 return it - vector.begin();
2722 return vector.size();
2734 template<
typename T>
2735 class VmaPoolAllocator
2738 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2739 ~VmaPoolAllocator();
2747 uint32_t NextFreeIndex;
2754 uint32_t FirstFreeIndex;
2757 const VkAllocationCallbacks* m_pAllocationCallbacks;
2758 size_t m_ItemsPerBlock;
2759 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2761 ItemBlock& CreateNewBlock();
2764 template<
typename T>
2765 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2766 m_pAllocationCallbacks(pAllocationCallbacks),
2767 m_ItemsPerBlock(itemsPerBlock),
2768 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2770 VMA_ASSERT(itemsPerBlock > 0);
2773 template<
typename T>
2774 VmaPoolAllocator<T>::~VmaPoolAllocator()
// Releases every item block owned by the pool (newest first) and empties the
// block list. Outstanding pointers from Alloc() become invalid.
template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
// Returns storage for one T from the pool. Scans existing blocks (newest
// first) for a free slot; when every block is full, grows the pool by one
// block. Free slots within a block form a singly linked list threaded through
// the items' NextFreeIndex fields.
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // UINT32_MAX marks a block whose free list is empty.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex; // pop free-list head
            return &pItem->Value;
        }
    }
    // No free slot in any block: allocate a fresh block and use its first slot.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
2809 template<
typename T>
2810 void VmaPoolAllocator<T>::Free(T* ptr)
2813 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2815 ItemBlock& block = m_ItemBlocks[i];
2819 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2822 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2824 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2825 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2826 block.FirstFreeIndex = index;
2830 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
// Appends a block of m_ItemsPerBlock items to the pool and threads all of its
// slots into the block's free list (slot i points to i+1; the last slot is
// terminated with UINT32_MAX). Returns a reference to the stored block.
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    // FirstFreeIndex starts at 0: every slot is free in a new block.
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
2851 #if VMA_USE_STL_LIST 2853 #define VmaList std::list 2855 #else // #if VMA_USE_STL_LIST 2857 template<
typename T>
2866 template<
typename T>
2870 typedef VmaListItem<T> ItemType;
2872 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2876 size_t GetCount()
const {
return m_Count; }
2877 bool IsEmpty()
const {
return m_Count == 0; }
2879 ItemType* Front() {
return m_pFront; }
2880 const ItemType* Front()
const {
return m_pFront; }
2881 ItemType* Back() {
return m_pBack; }
2882 const ItemType* Back()
const {
return m_pBack; }
2884 ItemType* PushBack();
2885 ItemType* PushFront();
2886 ItemType* PushBack(
const T& value);
2887 ItemType* PushFront(
const T& value);
2892 ItemType* InsertBefore(ItemType* pItem);
2894 ItemType* InsertAfter(ItemType* pItem);
2896 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2897 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2899 void Remove(ItemType* pItem);
2902 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2903 VmaPoolAllocator<ItemType> m_ItemAllocator;
2909 VmaRawList(
const VmaRawList<T>& src);
2910 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2913 template<
typename T>
2914 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2915 m_pAllocationCallbacks(pAllocationCallbacks),
2916 m_ItemAllocator(pAllocationCallbacks, 128),
2923 template<
typename T>
2924 VmaRawList<T>::~VmaRawList()
2930 template<
typename T>
2931 void VmaRawList<T>::Clear()
2933 if(IsEmpty() ==
false)
2935 ItemType* pItem = m_pBack;
2936 while(pItem != VMA_NULL)
2938 ItemType*
const pPrevItem = pItem->pPrev;
2939 m_ItemAllocator.Free(pItem);
2942 m_pFront = VMA_NULL;
2948 template<
typename T>
2949 VmaListItem<T>* VmaRawList<T>::PushBack()
2951 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2952 pNewItem->pNext = VMA_NULL;
2955 pNewItem->pPrev = VMA_NULL;
2956 m_pFront = pNewItem;
2962 pNewItem->pPrev = m_pBack;
2963 m_pBack->pNext = pNewItem;
2970 template<
typename T>
2971 VmaListItem<T>* VmaRawList<T>::PushFront()
2973 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2974 pNewItem->pPrev = VMA_NULL;
2977 pNewItem->pNext = VMA_NULL;
2978 m_pFront = pNewItem;
2984 pNewItem->pNext = m_pFront;
2985 m_pFront->pPrev = pNewItem;
2986 m_pFront = pNewItem;
2992 template<
typename T>
2993 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2995 ItemType*
const pNewItem = PushBack();
2996 pNewItem->Value = value;
3000 template<
typename T>
3001 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3003 ItemType*
const pNewItem = PushFront();
3004 pNewItem->Value = value;
3008 template<
typename T>
3009 void VmaRawList<T>::PopBack()
3011 VMA_HEAVY_ASSERT(m_Count > 0);
3012 ItemType*
const pBackItem = m_pBack;
3013 ItemType*
const pPrevItem = pBackItem->pPrev;
3014 if(pPrevItem != VMA_NULL)
3016 pPrevItem->pNext = VMA_NULL;
3018 m_pBack = pPrevItem;
3019 m_ItemAllocator.Free(pBackItem);
3023 template<
typename T>
3024 void VmaRawList<T>::PopFront()
3026 VMA_HEAVY_ASSERT(m_Count > 0);
3027 ItemType*
const pFrontItem = m_pFront;
3028 ItemType*
const pNextItem = pFrontItem->pNext;
3029 if(pNextItem != VMA_NULL)
3031 pNextItem->pPrev = VMA_NULL;
3033 m_pFront = pNextItem;
3034 m_ItemAllocator.Free(pFrontItem);
3038 template<
typename T>
3039 void VmaRawList<T>::Remove(ItemType* pItem)
3041 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3042 VMA_HEAVY_ASSERT(m_Count > 0);
3044 if(pItem->pPrev != VMA_NULL)
3046 pItem->pPrev->pNext = pItem->pNext;
3050 VMA_HEAVY_ASSERT(m_pFront == pItem);
3051 m_pFront = pItem->pNext;
3054 if(pItem->pNext != VMA_NULL)
3056 pItem->pNext->pPrev = pItem->pPrev;
3060 VMA_HEAVY_ASSERT(m_pBack == pItem);
3061 m_pBack = pItem->pPrev;
3064 m_ItemAllocator.Free(pItem);
3068 template<
typename T>
3069 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3071 if(pItem != VMA_NULL)
3073 ItemType*
const prevItem = pItem->pPrev;
3074 ItemType*
const newItem = m_ItemAllocator.Alloc();
3075 newItem->pPrev = prevItem;
3076 newItem->pNext = pItem;
3077 pItem->pPrev = newItem;
3078 if(prevItem != VMA_NULL)
3080 prevItem->pNext = newItem;
3084 VMA_HEAVY_ASSERT(m_pFront == pItem);
3094 template<
typename T>
3095 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3097 if(pItem != VMA_NULL)
3099 ItemType*
const nextItem = pItem->pNext;
3100 ItemType*
const newItem = m_ItemAllocator.Alloc();
3101 newItem->pNext = nextItem;
3102 newItem->pPrev = pItem;
3103 pItem->pNext = newItem;
3104 if(nextItem != VMA_NULL)
3106 nextItem->pPrev = newItem;
3110 VMA_HEAVY_ASSERT(m_pBack == pItem);
3120 template<
typename T>
3121 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3123 ItemType*
const newItem = InsertBefore(pItem);
3124 newItem->Value = value;
3128 template<
typename T>
3129 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3131 ItemType*
const newItem = InsertAfter(pItem);
3132 newItem->Value = value;
3136 template<
typename T,
typename AllocatorT>
3149 T& operator*()
const 3151 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3152 return m_pItem->Value;
3154 T* operator->()
const 3156 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3157 return &m_pItem->Value;
3160 iterator& operator++()
3162 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3163 m_pItem = m_pItem->pNext;
3166 iterator& operator--()
3168 if(m_pItem != VMA_NULL)
3170 m_pItem = m_pItem->pPrev;
3174 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3175 m_pItem = m_pList->Back();
3180 iterator operator++(
int)
3182 iterator result = *
this;
3186 iterator operator--(
int)
3188 iterator result = *
this;
3193 bool operator==(
const iterator& rhs)
const 3195 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3196 return m_pItem == rhs.m_pItem;
3198 bool operator!=(
const iterator& rhs)
const 3200 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3201 return m_pItem != rhs.m_pItem;
3205 VmaRawList<T>* m_pList;
3206 VmaListItem<T>* m_pItem;
3208 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3214 friend class VmaList<T, AllocatorT>;
3217 class const_iterator
3226 const_iterator(
const iterator& src) :
3227 m_pList(src.m_pList),
3228 m_pItem(src.m_pItem)
3232 const T& operator*()
const 3234 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3235 return m_pItem->Value;
3237 const T* operator->()
const 3239 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3240 return &m_pItem->Value;
3243 const_iterator& operator++()
3245 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3246 m_pItem = m_pItem->pNext;
3249 const_iterator& operator--()
3251 if(m_pItem != VMA_NULL)
3253 m_pItem = m_pItem->pPrev;
3257 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3258 m_pItem = m_pList->Back();
3263 const_iterator operator++(
int)
3265 const_iterator result = *
this;
3269 const_iterator operator--(
int)
3271 const_iterator result = *
this;
3276 bool operator==(
const const_iterator& rhs)
const 3278 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3279 return m_pItem == rhs.m_pItem;
3281 bool operator!=(
const const_iterator& rhs)
const 3283 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3284 return m_pItem != rhs.m_pItem;
3288 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3294 const VmaRawList<T>* m_pList;
3295 const VmaListItem<T>* m_pItem;
3297 friend class VmaList<T, AllocatorT>;
3300 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3302 bool empty()
const {
return m_RawList.IsEmpty(); }
3303 size_t size()
const {
return m_RawList.GetCount(); }
3305 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3306 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3308 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3309 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3311 void clear() { m_RawList.Clear(); }
3312 void push_back(
const T& value) { m_RawList.PushBack(value); }
3313 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3314 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3317 VmaRawList<T> m_RawList;
3320 #endif // #if VMA_USE_STL_LIST 3328 #if VMA_USE_STL_UNORDERED_MAP 3330 #define VmaPair std::pair 3332 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3333 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3335 #else // #if VMA_USE_STL_UNORDERED_MAP 3337 template<
typename T1,
typename T2>
3343 VmaPair() : first(), second() { }
3344 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3350 template<
typename KeyT,
typename ValueT>
3354 typedef VmaPair<KeyT, ValueT> PairType;
3355 typedef PairType* iterator;
3357 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3359 iterator begin() {
return m_Vector.begin(); }
3360 iterator end() {
return m_Vector.end(); }
3362 void insert(
const PairType& pair);
3363 iterator find(
const KeyT& key);
3364 void erase(iterator it);
3367 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3370 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3372 template<
typename FirstT,
typename SecondT>
3373 struct VmaPairFirstLess
3375 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3377 return lhs.first < rhs.first;
3379 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3381 return lhs.first < rhsFirst;
3385 template<
typename KeyT,
typename ValueT>
3386 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3388 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3390 m_Vector.data() + m_Vector.size(),
3392 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3393 VmaVectorInsert(m_Vector, indexToInsert, pair);
3396 template<
typename KeyT,
typename ValueT>
3397 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3399 PairType* it = VmaBinaryFindFirstNotLess(
3401 m_Vector.data() + m_Vector.size(),
3403 VmaPairFirstLess<KeyT, ValueT>());
3404 if((it != m_Vector.end()) && (it->first == key))
3410 return m_Vector.end();
3414 template<
typename KeyT,
typename ValueT>
3415 void VmaMap<KeyT, ValueT>::erase(iterator it)
3417 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3420 #endif // #if VMA_USE_STL_UNORDERED_MAP 3426 class VmaDeviceMemoryBlock;
3428 struct VmaAllocation_T
3431 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3435 FLAG_USER_DATA_STRING = 0x01,
3439 enum ALLOCATION_TYPE
3441 ALLOCATION_TYPE_NONE,
3442 ALLOCATION_TYPE_BLOCK,
3443 ALLOCATION_TYPE_DEDICATED,
3446 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3449 m_pUserData(VMA_NULL),
3450 m_LastUseFrameIndex(currentFrameIndex),
3451 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3452 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3454 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3460 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3463 VMA_ASSERT(m_pUserData == VMA_NULL);
3466 void InitBlockAllocation(
3468 VmaDeviceMemoryBlock* block,
3469 VkDeviceSize offset,
3470 VkDeviceSize alignment,
3472 VmaSuballocationType suballocationType,
3476 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3477 VMA_ASSERT(block != VMA_NULL);
3478 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3479 m_Alignment = alignment;
3481 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3482 m_SuballocationType = (uint8_t)suballocationType;
3483 m_BlockAllocation.m_hPool = hPool;
3484 m_BlockAllocation.m_Block = block;
3485 m_BlockAllocation.m_Offset = offset;
3486 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3491 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3492 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3493 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3494 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3495 m_BlockAllocation.m_Block = VMA_NULL;
3496 m_BlockAllocation.m_Offset = 0;
3497 m_BlockAllocation.m_CanBecomeLost =
true;
3500 void ChangeBlockAllocation(
3501 VmaAllocator hAllocator,
3502 VmaDeviceMemoryBlock* block,
3503 VkDeviceSize offset);
3506 void InitDedicatedAllocation(
3507 uint32_t memoryTypeIndex,
3508 VkDeviceMemory hMemory,
3509 VmaSuballocationType suballocationType,
3513 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3514 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3515 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3518 m_SuballocationType = (uint8_t)suballocationType;
3519 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3520 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3521 m_DedicatedAllocation.m_hMemory = hMemory;
3522 m_DedicatedAllocation.m_pMappedData = pMappedData;
3525 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3526 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3527 VkDeviceSize GetSize()
const {
return m_Size; }
3528 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3529 void* GetUserData()
const {
return m_pUserData; }
3530 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3531 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3533 VmaDeviceMemoryBlock* GetBlock()
const 3535 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3536 return m_BlockAllocation.m_Block;
3538 VkDeviceSize GetOffset()
const;
3539 VkDeviceMemory GetMemory()
const;
3540 uint32_t GetMemoryTypeIndex()
const;
3541 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3542 void* GetMappedData()
const;
3543 bool CanBecomeLost()
const;
3544 VmaPool GetPool()
const;
3546 uint32_t GetLastUseFrameIndex()
const 3548 return m_LastUseFrameIndex.load();
3550 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3552 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3562 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3564 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3566 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3577 void BlockAllocMap();
3578 void BlockAllocUnmap();
3579 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3580 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3583 VkDeviceSize m_Alignment;
3584 VkDeviceSize m_Size;
3586 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3588 uint8_t m_SuballocationType;
3595 struct BlockAllocation
3598 VmaDeviceMemoryBlock* m_Block;
3599 VkDeviceSize m_Offset;
3600 bool m_CanBecomeLost;
3604 struct DedicatedAllocation
3606 uint32_t m_MemoryTypeIndex;
3607 VkDeviceMemory m_hMemory;
3608 void* m_pMappedData;
3614 BlockAllocation m_BlockAllocation;
3616 DedicatedAllocation m_DedicatedAllocation;
3619 void FreeUserDataString(VmaAllocator hAllocator);
3626 struct VmaSuballocation
3628 VkDeviceSize offset;
3630 VmaAllocation hAllocation;
3631 VmaSuballocationType type;
3634 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3637 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3652 struct VmaAllocationRequest
3654 VkDeviceSize offset;
3655 VkDeviceSize sumFreeSize;
3656 VkDeviceSize sumItemSize;
3657 VmaSuballocationList::iterator item;
3658 size_t itemsToMakeLostCount;
3660 VkDeviceSize CalcCost()
const 3662 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3670 class VmaBlockMetadata
3673 VmaBlockMetadata(VmaAllocator hAllocator);
3674 ~VmaBlockMetadata();
3675 void Init(VkDeviceSize size);
3678 bool Validate()
const;
3679 VkDeviceSize GetSize()
const {
return m_Size; }
3680 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3681 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3682 VkDeviceSize GetUnusedRangeSizeMax()
const;
3684 bool IsEmpty()
const;
3686 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3689 #if VMA_STATS_STRING_ENABLED 3690 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3694 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3699 bool CreateAllocationRequest(
3700 uint32_t currentFrameIndex,
3701 uint32_t frameInUseCount,
3702 VkDeviceSize bufferImageGranularity,
3703 VkDeviceSize allocSize,
3704 VkDeviceSize allocAlignment,
3705 VmaSuballocationType allocType,
3706 bool canMakeOtherLost,
3707 VmaAllocationRequest* pAllocationRequest);
3709 bool MakeRequestedAllocationsLost(
3710 uint32_t currentFrameIndex,
3711 uint32_t frameInUseCount,
3712 VmaAllocationRequest* pAllocationRequest);
3714 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3718 const VmaAllocationRequest& request,
3719 VmaSuballocationType type,
3720 VkDeviceSize allocSize,
3721 VmaAllocation hAllocation);
3724 void Free(
const VmaAllocation allocation);
3725 void FreeAtOffset(VkDeviceSize offset);
3728 VkDeviceSize m_Size;
3729 uint32_t m_FreeCount;
3730 VkDeviceSize m_SumFreeSize;
3731 VmaSuballocationList m_Suballocations;
3734 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3736 bool ValidateFreeSuballocationList()
const;
3740 bool CheckAllocation(
3741 uint32_t currentFrameIndex,
3742 uint32_t frameInUseCount,
3743 VkDeviceSize bufferImageGranularity,
3744 VkDeviceSize allocSize,
3745 VkDeviceSize allocAlignment,
3746 VmaSuballocationType allocType,
3747 VmaSuballocationList::const_iterator suballocItem,
3748 bool canMakeOtherLost,
3749 VkDeviceSize* pOffset,
3750 size_t* itemsToMakeLostCount,
3751 VkDeviceSize* pSumFreeSize,
3752 VkDeviceSize* pSumItemSize)
const;
3754 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3758 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3761 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3764 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3768 class VmaDeviceMemoryMapping
3771 VmaDeviceMemoryMapping();
3772 ~VmaDeviceMemoryMapping();
3774 void* GetMappedData()
const {
return m_pMappedData; }
3777 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3778 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3782 uint32_t m_MapCount;
3783 void* m_pMappedData;
3792 class VmaDeviceMemoryBlock
3795 uint32_t m_MemoryTypeIndex;
3796 VkDeviceMemory m_hMemory;
3797 VmaDeviceMemoryMapping m_Mapping;
3798 VmaBlockMetadata m_Metadata;
3800 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3802 ~VmaDeviceMemoryBlock()
3804 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3809 uint32_t newMemoryTypeIndex,
3810 VkDeviceMemory newMemory,
3811 VkDeviceSize newSize);
3813 void Destroy(VmaAllocator allocator);
3816 bool Validate()
const;
3819 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3820 void Unmap(VmaAllocator hAllocator, uint32_t count);
3823 struct VmaPointerLess
3825 bool operator()(
const void* lhs,
const void* rhs)
const 3831 class VmaDefragmentator;
3839 struct VmaBlockVector
3842 VmaAllocator hAllocator,
3843 uint32_t memoryTypeIndex,
3844 VkDeviceSize preferredBlockSize,
3845 size_t minBlockCount,
3846 size_t maxBlockCount,
3847 VkDeviceSize bufferImageGranularity,
3848 uint32_t frameInUseCount,
3852 VkResult CreateMinBlocks();
3854 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3855 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3856 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3857 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3861 bool IsEmpty()
const {
return m_Blocks.empty(); }
3864 VmaPool hCurrentPool,
3865 uint32_t currentFrameIndex,
3866 const VkMemoryRequirements& vkMemReq,
3868 VmaSuballocationType suballocType,
3869 VmaAllocation* pAllocation);
3872 VmaAllocation hAllocation);
3877 #if VMA_STATS_STRING_ENABLED 3878 void PrintDetailedMap(
class VmaJsonWriter& json);
3881 void MakePoolAllocationsLost(
3882 uint32_t currentFrameIndex,
3883 size_t* pLostAllocationCount);
3885 VmaDefragmentator* EnsureDefragmentator(
3886 VmaAllocator hAllocator,
3887 uint32_t currentFrameIndex);
3889 VkResult Defragment(
3891 VkDeviceSize& maxBytesToMove,
3892 uint32_t& maxAllocationsToMove);
3894 void DestroyDefragmentator();
3897 friend class VmaDefragmentator;
3899 const VmaAllocator m_hAllocator;
3900 const uint32_t m_MemoryTypeIndex;
3901 const VkDeviceSize m_PreferredBlockSize;
3902 const size_t m_MinBlockCount;
3903 const size_t m_MaxBlockCount;
3904 const VkDeviceSize m_BufferImageGranularity;
3905 const uint32_t m_FrameInUseCount;
3906 const bool m_IsCustomPool;
3909 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3913 bool m_HasEmptyBlock;
3914 VmaDefragmentator* m_pDefragmentator;
3916 size_t CalcMaxBlockSize()
const;
3919 void Remove(VmaDeviceMemoryBlock* pBlock);
3923 void IncrementallySortBlocks();
3925 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3931 VmaBlockVector m_BlockVector;
3935 VmaAllocator hAllocator,
3939 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3941 #if VMA_STATS_STRING_ENABLED 3946 class VmaDefragmentator
3948 const VmaAllocator m_hAllocator;
3949 VmaBlockVector*
const m_pBlockVector;
3950 uint32_t m_CurrentFrameIndex;
3951 VkDeviceSize m_BytesMoved;
3952 uint32_t m_AllocationsMoved;
3954 struct AllocationInfo
3956 VmaAllocation m_hAllocation;
3957 VkBool32* m_pChanged;
3960 m_hAllocation(VK_NULL_HANDLE),
3961 m_pChanged(VMA_NULL)
3966 struct AllocationInfoSizeGreater
3968 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3970 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3975 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3979 VmaDeviceMemoryBlock* m_pBlock;
3980 bool m_HasNonMovableAllocations;
3981 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3983 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3985 m_HasNonMovableAllocations(true),
3986 m_Allocations(pAllocationCallbacks),
3987 m_pMappedDataForDefragmentation(VMA_NULL)
3991 void CalcHasNonMovableAllocations()
3993 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3994 const size_t defragmentAllocCount = m_Allocations.size();
3995 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3998 void SortAllocationsBySizeDescecnding()
4000 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4003 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
4004 void Unmap(VmaAllocator hAllocator);
4008 void* m_pMappedDataForDefragmentation;
4011 struct BlockPointerLess
4013 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4015 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4017 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4019 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4025 struct BlockInfoCompareMoveDestination
4027 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4029 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4033 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4037 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4045 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4046 BlockInfoVector m_Blocks;
4048 VkResult DefragmentRound(
4049 VkDeviceSize maxBytesToMove,
4050 uint32_t maxAllocationsToMove);
4052 static bool MoveMakesSense(
4053 size_t dstBlockIndex, VkDeviceSize dstOffset,
4054 size_t srcBlockIndex, VkDeviceSize srcOffset);
4058 VmaAllocator hAllocator,
4059 VmaBlockVector* pBlockVector,
4060 uint32_t currentFrameIndex);
4062 ~VmaDefragmentator();
4064 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4065 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4067 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
4069 VkResult Defragment(
4070 VkDeviceSize maxBytesToMove,
4071 uint32_t maxAllocationsToMove);
4075 struct VmaAllocator_T
4078 bool m_UseKhrDedicatedAllocation;
4080 bool m_AllocationCallbacksSpecified;
4081 VkAllocationCallbacks m_AllocationCallbacks;
4085 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4086 VMA_MUTEX m_HeapSizeLimitMutex;
4088 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4089 VkPhysicalDeviceMemoryProperties m_MemProps;
4092 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4095 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4096 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4097 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4102 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4104 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4108 return m_VulkanFunctions;
4111 VkDeviceSize GetBufferImageGranularity()
const 4114 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4115 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4118 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4119 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4121 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4123 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4124 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4127 void GetBufferMemoryRequirements(
4129 VkMemoryRequirements& memReq,
4130 bool& requiresDedicatedAllocation,
4131 bool& prefersDedicatedAllocation)
const;
4132 void GetImageMemoryRequirements(
4134 VkMemoryRequirements& memReq,
4135 bool& requiresDedicatedAllocation,
4136 bool& prefersDedicatedAllocation)
const;
4139 VkResult AllocateMemory(
4140 const VkMemoryRequirements& vkMemReq,
4141 bool requiresDedicatedAllocation,
4142 bool prefersDedicatedAllocation,
4143 VkBuffer dedicatedBuffer,
4144 VkImage dedicatedImage,
4146 VmaSuballocationType suballocType,
4147 VmaAllocation* pAllocation);
4150 void FreeMemory(
const VmaAllocation allocation);
4152 void CalculateStats(
VmaStats* pStats);
4154 #if VMA_STATS_STRING_ENABLED 4155 void PrintDetailedMap(
class VmaJsonWriter& json);
4158 VkResult Defragment(
4159 VmaAllocation* pAllocations,
4160 size_t allocationCount,
4161 VkBool32* pAllocationsChanged,
4165 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4166 bool TouchAllocation(VmaAllocation hAllocation);
4169 void DestroyPool(VmaPool pool);
4170 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4172 void SetCurrentFrameIndex(uint32_t frameIndex);
4174 void MakePoolAllocationsLost(
4176 size_t* pLostAllocationCount);
4178 void CreateLostAllocation(VmaAllocation* pAllocation);
4180 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4181 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4183 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4184 void Unmap(VmaAllocation hAllocation);
4187 VkDeviceSize m_PreferredLargeHeapBlockSize;
4189 VkPhysicalDevice m_PhysicalDevice;
4190 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4192 VMA_MUTEX m_PoolsMutex;
4194 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4200 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4202 VkResult AllocateMemoryOfType(
4203 const VkMemoryRequirements& vkMemReq,
4204 bool dedicatedAllocation,
4205 VkBuffer dedicatedBuffer,
4206 VkImage dedicatedImage,
4208 uint32_t memTypeIndex,
4209 VmaSuballocationType suballocType,
4210 VmaAllocation* pAllocation);
4213 VkResult AllocateDedicatedMemory(
4215 VmaSuballocationType suballocType,
4216 uint32_t memTypeIndex,
4218 bool isUserDataString,
4220 VkBuffer dedicatedBuffer,
4221 VkImage dedicatedImage,
4222 VmaAllocation* pAllocation);
4225 void FreeDedicatedMemory(VmaAllocation allocation);
4231 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4233 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4236 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4238 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4241 template<
typename T>
4242 static T* VmaAllocate(VmaAllocator hAllocator)
4244 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4247 template<
typename T>
4248 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4250 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4253 template<
typename T>
4254 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4259 VmaFree(hAllocator, ptr);
4263 template<
typename T>
4264 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4268 for(
size_t i = count; i--; )
4270 VmaFree(hAllocator, ptr);
4277 #if VMA_STATS_STRING_ENABLED 4279 class VmaStringBuilder
4282 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4283 size_t GetLength()
const {
return m_Data.size(); }
4284 const char* GetData()
const {
return m_Data.data(); }
4286 void Add(
char ch) { m_Data.push_back(ch); }
4287 void Add(
const char* pStr);
4288 void AddNewLine() { Add(
'\n'); }
4289 void AddNumber(uint32_t num);
4290 void AddNumber(uint64_t num);
4291 void AddPointer(
const void* ptr);
4294 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends the NUL-terminated string `pStr` (without its terminator) to the
// growable character buffer m_Data: grow by strlen and memcpy the bytes in.
// NOTE(review): extraction artifact — originals 4298/4300-4301/4305-4306
// (braces and presumably an `if(strLen > 0)` guard — TODO confirm) are
// missing from this fragment; code tokens left untouched.
4297 void VmaStringBuilder::Add(
const char* pStr)
4299 const size_t strLen = strlen(pStr);
4302 const size_t oldCount = m_Data.size();
4303 m_Data.resize(oldCount + strLen);
4304 memcpy(m_Data.data() + oldCount, pStr, strLen);
4308 void VmaStringBuilder::AddNumber(uint32_t num)
4311 VmaUint32ToStr(buf,
sizeof(buf), num);
4315 void VmaStringBuilder::AddNumber(uint64_t num)
4318 VmaUint64ToStr(buf,
sizeof(buf), num);
4322 void VmaStringBuilder::AddPointer(
const void* ptr)
4325 VmaPtrToStr(buf,
sizeof(buf), ptr);
4329 #endif // #if VMA_STATS_STRING_ENABLED 4334 #if VMA_STATS_STRING_ENABLED 4339 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4342 void BeginObject(
bool singleLine =
false);
4345 void BeginArray(
bool singleLine =
false);
4348 void WriteString(
const char* pStr);
4349 void BeginString(
const char* pStr = VMA_NULL);
4350 void ContinueString(
const char* pStr);
4351 void ContinueString(uint32_t n);
4352 void ContinueString(uint64_t n);
4353 void ContinueString_Pointer(
const void* ptr);
4354 void EndString(
const char* pStr = VMA_NULL);
4356 void WriteNumber(uint32_t n);
4357 void WriteNumber(uint64_t n);
4358 void WriteBool(
bool b);
4362 static const char*
const INDENT;
4364 enum COLLECTION_TYPE
4366 COLLECTION_TYPE_OBJECT,
4367 COLLECTION_TYPE_ARRAY,
4371 COLLECTION_TYPE type;
4372 uint32_t valueCount;
4373 bool singleLineMode;
4376 VmaStringBuilder& m_SB;
4377 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4378 bool m_InsideString;
4380 void BeginValue(
bool isString);
4381 void WriteIndent(
bool oneLess =
false);
4384 const char*
const VmaJsonWriter::INDENT =
" ";
4386 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4388 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4389 m_InsideString(false)
// Destructor sanity checks: no string may still be open (EndString missing)
// and every BeginObject/BeginArray must have been matched by End*.
// NOTE(review): extraction artifact — brace lines (4394/4397) are missing.
4393 VmaJsonWriter::~VmaJsonWriter()
4395 VMA_ASSERT(!m_InsideString);
4396 VMA_ASSERT(m_Stack.empty());
4399 void VmaJsonWriter::BeginObject(
bool singleLine)
4401 VMA_ASSERT(!m_InsideString);
4407 item.type = COLLECTION_TYPE_OBJECT;
4408 item.valueCount = 0;
4409 item.singleLineMode = singleLine;
4410 m_Stack.push_back(item);
4413 void VmaJsonWriter::EndObject()
4415 VMA_ASSERT(!m_InsideString);
4420 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4424 void VmaJsonWriter::BeginArray(
bool singleLine)
4426 VMA_ASSERT(!m_InsideString);
4432 item.type = COLLECTION_TYPE_ARRAY;
4433 item.valueCount = 0;
4434 item.singleLineMode = singleLine;
4435 m_Stack.push_back(item);
4438 void VmaJsonWriter::EndArray()
4440 VMA_ASSERT(!m_InsideString);
4445 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4449 void VmaJsonWriter::WriteString(
const char* pStr)
4455 void VmaJsonWriter::BeginString(
const char* pStr)
4457 VMA_ASSERT(!m_InsideString);
4461 m_InsideString =
true;
4462 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4464 ContinueString(pStr);
4468 void VmaJsonWriter::ContinueString(
const char* pStr)
4470 VMA_ASSERT(m_InsideString);
4472 const size_t strLen = strlen(pStr);
4473 for(
size_t i = 0; i < strLen; ++i)
4506 VMA_ASSERT(0 &&
"Character not currently supported.");
4512 void VmaJsonWriter::ContinueString(uint32_t n)
4514 VMA_ASSERT(m_InsideString);
4518 void VmaJsonWriter::ContinueString(uint64_t n)
4520 VMA_ASSERT(m_InsideString);
4524 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4526 VMA_ASSERT(m_InsideString);
4527 m_SB.AddPointer(ptr);
4530 void VmaJsonWriter::EndString(
const char* pStr)
4532 VMA_ASSERT(m_InsideString);
4533 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4535 ContinueString(pStr);
4538 m_InsideString =
false;
4541 void VmaJsonWriter::WriteNumber(uint32_t n)
4543 VMA_ASSERT(!m_InsideString);
4548 void VmaJsonWriter::WriteNumber(uint64_t n)
4550 VMA_ASSERT(!m_InsideString);
4555 void VmaJsonWriter::WriteBool(
bool b)
4557 VMA_ASSERT(!m_InsideString);
4559 m_SB.Add(b ?
"true" :
"false");
4562 void VmaJsonWriter::WriteNull()
4564 VMA_ASSERT(!m_InsideString);
4569 void VmaJsonWriter::BeginValue(
bool isString)
4571 if(!m_Stack.empty())
4573 StackItem& currItem = m_Stack.back();
4574 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4575 currItem.valueCount % 2 == 0)
4577 VMA_ASSERT(isString);
4580 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4581 currItem.valueCount % 2 != 0)
4585 else if(currItem.valueCount > 0)
4594 ++currItem.valueCount;
4598 void VmaJsonWriter::WriteIndent(
bool oneLess)
4600 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4604 size_t count = m_Stack.size();
4605 if(count > 0 && oneLess)
4609 for(
size_t i = 0; i < count; ++i)
// Sets this allocation's user data. In string mode (FLAG_USER_DATA_STRING):
// frees the previously owned copy, then deep-copies the new string
// (including its NUL terminator) via vma_new_array. Otherwise stores the
// raw pointer without taking ownership (line 4639).
// NOTE(review): extraction artifact — braces and several interior lines
// (e.g. 4621/4623/4625/4627/4629, the else branch around 4639) are missing
// from this fragment; code tokens left untouched.
4616 #endif // #if VMA_STATS_STRING_ENABLED 4620 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4622 if(IsUserDataString())
// Setting the same owned pointer again would be a use-after-free; forbid it.
4624 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4626 FreeUserDataString(hAllocator);
4628 if(pUserData != VMA_NULL)
4630 const char*
const newStrSrc = (
char*)pUserData;
4631 const size_t newStrLen = strlen(newStrSrc);
// Allocate newStrLen + 1 to hold the terminator as well.
4632 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4633 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4634 m_pUserData = newStrDst;
// Non-string mode: store the caller's pointer as-is.
4639 m_pUserData = pUserData;
// Re-points this block-type allocation at a new device-memory block/offset
// (used during defragmentation moves). When the block actually changes and
// the allocation is persistently mapped, the mapping reference count is
// transferred: unmap on the old block, re-map on the new one.
// NOTE(review): extraction artifact — braces and a few interior lines
// (4647/4650-4651/4653/4656/4659-4660) are missing; code tokens left as-is.
4643 void VmaAllocation_T::ChangeBlockAllocation(
4644 VmaAllocator hAllocator,
4645 VmaDeviceMemoryBlock* block,
4646 VkDeviceSize offset)
4648 VMA_ASSERT(block != VMA_NULL);
4649 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4652 if(block != m_BlockAllocation.m_Block)
// Map refcount excluding the persistent-map flag bit.
4654 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4655 if(IsPersistentMap())
4657 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4658 block->Map(hAllocator, mapRefCount, VMA_NULL);
4661 m_BlockAllocation.m_Block = block;
4662 m_BlockAllocation.m_Offset = offset;
4665 VkDeviceSize VmaAllocation_T::GetOffset()
const 4669 case ALLOCATION_TYPE_BLOCK:
4670 return m_BlockAllocation.m_Offset;
4671 case ALLOCATION_TYPE_DEDICATED:
4679 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4683 case ALLOCATION_TYPE_BLOCK:
4684 return m_BlockAllocation.m_Block->m_hMemory;
4685 case ALLOCATION_TYPE_DEDICATED:
4686 return m_DedicatedAllocation.m_hMemory;
4689 return VK_NULL_HANDLE;
4693 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4697 case ALLOCATION_TYPE_BLOCK:
4698 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4699 case ALLOCATION_TYPE_DEDICATED:
4700 return m_DedicatedAllocation.m_MemoryTypeIndex;
4707 void* VmaAllocation_T::GetMappedData()
const 4711 case ALLOCATION_TYPE_BLOCK:
4714 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4715 VMA_ASSERT(pBlockData != VMA_NULL);
4716 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4723 case ALLOCATION_TYPE_DEDICATED:
4724 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4725 return m_DedicatedAllocation.m_pMappedData;
4732 bool VmaAllocation_T::CanBecomeLost()
const 4736 case ALLOCATION_TYPE_BLOCK:
4737 return m_BlockAllocation.m_CanBecomeLost;
4738 case ALLOCATION_TYPE_DEDICATED:
4746 VmaPool VmaAllocation_T::GetPool()
const 4748 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4749 return m_BlockAllocation.m_hPool;
// Attempts to mark this allocation as lost via an atomic CAS on the
// last-use frame index. Fails when it is already lost or was used within
// the last `frameInUseCount` frames relative to `currentFrameIndex`.
// NOTE(review): extraction artifact — the surrounding loop/braces and the
// return statements of each branch (presumably a CAS retry loop — TODO
// confirm against upstream) are missing; code tokens left untouched.
4752 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4754 VMA_ASSERT(CanBecomeLost());
4760 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
// Already lost: nothing to do.
4763 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still within the in-use window: must not be made lost.
4768 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
// Try to atomically transition to the LOST sentinel.
4774 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Frees the heap copy of the user-data string owned by this allocation
// (valid only in string mode) and nulls the pointer. The array length is
// strlen + 1 to match the terminator-inclusive allocation in SetUserData.
// NOTE(review): extraction artifact — only the brace lines are missing.
4784 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4786 VMA_ASSERT(IsUserDataString());
4787 if(m_pUserData != VMA_NULL)
4789 char*
const oldStr = (
char*)m_pUserData;
4790 const size_t oldStrLen = strlen(oldStr);
4791 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4792 m_pUserData = VMA_NULL;
// Bumps the map reference count of a block-type allocation. The low 7 bits
// of m_MapCount hold the count; the top bit is the persistent-map flag, so
// the count saturates at 0x7F.
// NOTE(review): extraction artifact — the branch bodies (presumably
// `++m_MapCount;` on success — TODO confirm) and braces are missing from
// this fragment; code tokens left untouched.
4796 void VmaAllocation_T::BlockAllocMap()
4798 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4800 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4806 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count of a block-type allocation; asserts
// if the allocation is not currently mapped (count bits already zero).
// NOTE(review): extraction artifact — the branch bodies (presumably
// `--m_MapCount;` on success — TODO confirm) and braces are missing from
// this fragment; code tokens left untouched.
4810 void VmaAllocation_T::BlockAllocUnmap()
4812 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4814 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4820 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated (own VkDeviceMemory) allocation. If already mapped,
// re-uses the cached pointer and returns it (up to 0x7F concurrent maps);
// otherwise calls vkMapMemory through the allocator's function table and
// caches the resulting pointer in m_DedicatedAllocation.m_pMappedData.
// NOTE(review): extraction artifact — braces, the map-count update lines,
// the vkMapMemory offset/size/flags arguments (4848-4851), and the final
// return are missing from this fragment; code tokens left untouched.
4824 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4826 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
// Already-mapped path: hand back the cached pointer if under the cap.
4830 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4832 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4833 *ppData = m_DedicatedAllocation.m_pMappedData;
4839 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4840 return VK_ERROR_MEMORY_MAP_FAILED;
// Not-yet-mapped path: perform the actual vkMapMemory call.
4845 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4846 hAllocator->m_hDevice,
4847 m_DedicatedAllocation.m_hMemory,
4852 if(result == VK_SUCCESS)
// Cache the mapped pointer for subsequent map calls.
4854 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation. When the map count reaches zero the cached
// pointer is cleared and vkUnmapMemory is called through the allocator's
// function table; unmapping an unmapped allocation asserts.
// NOTE(review): extraction artifact — braces, the decrement of m_MapCount,
// and the zero-count condition line (presumably the unmap happens only when
// the count drops to 0 — TODO confirm) are missing; code tokens left as-is.
4861 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4863 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4865 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4870 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4871 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4872 hAllocator->m_hDevice,
4873 m_DedicatedAllocation.m_hMemory);
4878 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4882 #if VMA_STATS_STRING_ENABLED 4885 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4894 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4898 json.WriteString(
"Blocks");
4901 json.WriteString(
"Allocations");
4904 json.WriteString(
"UnusedRanges");
4907 json.WriteString(
"UsedBytes");
4910 json.WriteString(
"UnusedBytes");
4915 json.WriteString(
"AllocationSize");
4916 json.BeginObject(
true);
4917 json.WriteString(
"Min");
4919 json.WriteString(
"Avg");
4921 json.WriteString(
"Max");
4928 json.WriteString(
"UnusedRangeSize");
4929 json.BeginObject(
true);
4930 json.WriteString(
"Min");
4932 json.WriteString(
"Avg");
4934 json.WriteString(
"Max");
4942 #endif // #if VMA_STATS_STRING_ENABLED 4944 struct VmaSuballocationItemSizeLess
4947 const VmaSuballocationList::iterator lhs,
4948 const VmaSuballocationList::iterator rhs)
const 4950 return lhs->size < rhs->size;
4953 const VmaSuballocationList::iterator lhs,
4954 VkDeviceSize rhsSize)
const 4956 return lhs->size < rhsSize;
4963 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4967 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4968 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4972 VmaBlockMetadata::~VmaBlockMetadata()
// Initializes block metadata for a freshly created memory block of `size`
// bytes: the entire block becomes a single free suballocation, which is
// then registered in the by-size free list.
// NOTE(review): extraction artifact — braces, the m_Size/m_FreeCount
// assignments (4978-4979), and the iterator decrement before registration
// (4990, stepping end() back to the just-pushed element — TODO confirm)
// are missing from this fragment; code tokens left untouched.
4976 void VmaBlockMetadata::Init(VkDeviceSize size)
4980 m_SumFreeSize = size;
// One free suballocation spanning the whole block.
4982 VmaSuballocation suballoc = {};
4983 suballoc.offset = 0;
4984 suballoc.size = size;
4985 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4986 suballoc.hAllocation = VK_NULL_HANDLE;
4988 m_Suballocations.push_back(suballoc);
4989 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4991 m_FreeSuballocationsBySize.push_back(suballocItem);
4994 bool VmaBlockMetadata::Validate()
const 4996 if(m_Suballocations.empty())
5002 VkDeviceSize calculatedOffset = 0;
5004 uint32_t calculatedFreeCount = 0;
5006 VkDeviceSize calculatedSumFreeSize = 0;
5009 size_t freeSuballocationsToRegister = 0;
5011 bool prevFree =
false;
5013 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5014 suballocItem != m_Suballocations.cend();
5017 const VmaSuballocation& subAlloc = *suballocItem;
5020 if(subAlloc.offset != calculatedOffset)
5025 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5027 if(prevFree && currFree)
5032 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5039 calculatedSumFreeSize += subAlloc.size;
5040 ++calculatedFreeCount;
5041 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5043 ++freeSuballocationsToRegister;
5048 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5052 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5058 calculatedOffset += subAlloc.size;
5059 prevFree = currFree;
5064 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5069 VkDeviceSize lastSize = 0;
5070 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5072 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5075 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5080 if(suballocItem->size < lastSize)
5085 lastSize = suballocItem->size;
5089 if(!ValidateFreeSuballocationList() ||
5090 (calculatedOffset != m_Size) ||
5091 (calculatedSumFreeSize != m_SumFreeSize) ||
5092 (calculatedFreeCount != m_FreeCount))
5100 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5102 if(!m_FreeSuballocationsBySize.empty())
5104 return m_FreeSuballocationsBySize.back()->size;
5112 bool VmaBlockMetadata::IsEmpty()
const 5114 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills outInfo with statistics for this block: range counts and, per
// suballocation, whether it contributes to used or unused totals.
// NOTE(review): the initialization of outInfo's fields and the per-branch
// accumulation bodies are missing from this extraction (numbering gaps
// 5122-5132 and after 5138) — confirm against the full source.
5117 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5121 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5133 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5134 suballocItem != m_Suballocations.cend();
5137 const VmaSuballocation& suballoc = *suballocItem;
// Used suballocations go into allocation stats; free ones into unused-range stats.
5138 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into inoutStats (pool-level statistics).
// Adds the whole block size; the remaining accumulation lines (unusedSize,
// allocationCount, unusedRangeCount, unusedRangeSizeMax — original lines
// 5156-5160) are missing from this extraction.
5151 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5153 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5155 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON: top-level totals followed by a
// "Suballocations" array with one object per suballocation (type, size,
// offset, and — for used suballocations — optional user data).
// Compiled only when stats strings are enabled.
5162 #if VMA_STATS_STRING_ENABLED 5164 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5168 json.WriteString(
"TotalBytes");
5169 json.WriteNumber(m_Size);
5171 json.WriteString(
"UnusedBytes");
5172 json.WriteNumber(m_SumFreeSize);
5174 json.WriteString(
"Allocations");
// Used allocations = total suballocation count minus free suballocations.
5175 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5177 json.WriteString(
"UnusedRanges");
5178 json.WriteNumber(m_FreeCount);
5180 json.WriteString(
"Suballocations");
5183 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5184 suballocItem != m_Suballocations.cend();
5185 ++suballocItem, ++i)
// One compact (single-line) JSON object per suballocation.
5187 json.BeginObject(
true);
5189 json.WriteString(
"Type");
5190 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5192 json.WriteString(
"Size");
5193 json.WriteNumber(suballocItem->size);
5195 json.WriteString(
"Offset");
5196 json.WriteNumber(suballocItem->offset);
// Only used suballocations can carry user data.
5198 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5200 const void* pUserData = suballocItem->hAllocation->GetUserData();
5201 if(pUserData != VMA_NULL)
5203 json.WriteString(
"UserData");
// User data is either an owned string or an opaque pointer printed as a number.
5204 if(suballocItem->hAllocation->IsUserDataString())
5206 json.WriteString((
const char*)pUserData);
5211 json.ContinueString_Pointer(pUserData);
5224 #endif // #if VMA_STATS_STRING_ENABLED 5236 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5238 VMA_ASSERT(IsEmpty());
5239 pAllocationRequest->offset = 0;
5240 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5241 pAllocationRequest->sumItemSize = 0;
5242 pAllocationRequest->item = m_Suballocations.begin();
5243 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find a place for an allocation of allocSize/allocAlignment inside
// this block. Two strategies:
//  1. Search only free suballocations via the sorted-by-size vector — either
//     best-fit (first not-smaller entry, ascending) or worst-fit (largest
//     first, descending); the strategy selection lines are missing here.
//  2. When canMakeOtherLost, additionally consider positions that require
//     evicting ("losing") existing lost-able allocations, picking the
//     cheapest candidate by CalcCost().
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): several argument lines of the CheckAllocation calls and the
// return statements are missing from this extraction — confirm against the
// full source before relying on exact control flow.
5246 bool VmaBlockMetadata::CreateAllocationRequest(
5247 uint32_t currentFrameIndex,
5248 uint32_t frameInUseCount,
5249 VkDeviceSize bufferImageGranularity,
5250 VkDeviceSize allocSize,
5251 VkDeviceSize allocAlignment,
5252 VmaSuballocationType allocType,
5253 bool canMakeOtherLost,
5254 VmaAllocationRequest* pAllocationRequest)
5256 VMA_ASSERT(allocSize > 0);
5257 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5258 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5259 VMA_HEAVY_ASSERT(Validate());
// Quick reject: without eviction, total free space must at least cover the request.
5262 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5268 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5269 if(freeSuballocCount > 0)
// Best-fit path: binary-search the sorted vector for the first free
// suballocation not smaller than the requested size, then scan forward.
5274 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5275 m_FreeSuballocationsBySize.data(),
5276 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5278 VmaSuballocationItemSizeLess());
5279 size_t index = it - m_FreeSuballocationsBySize.data();
5280 for(; index < freeSuballocCount; ++index)
5285 bufferImageGranularity,
5289 m_FreeSuballocationsBySize[index],
5291 &pAllocationRequest->offset,
5292 &pAllocationRequest->itemsToMakeLostCount,
5293 &pAllocationRequest->sumFreeSize,
5294 &pAllocationRequest->sumItemSize))
5296 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit path: iterate free suballocations from largest to smallest.
5304 for(
size_t index = freeSuballocCount; index--; )
5309 bufferImageGranularity,
5313 m_FreeSuballocationsBySize[index],
5315 &pAllocationRequest->offset,
5316 &pAllocationRequest->itemsToMakeLostCount,
5317 &pAllocationRequest->sumFreeSize,
5318 &pAllocationRequest->sumItemSize))
5320 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force over every suballocation that is free or whose
// allocation can become lost, keeping the cheapest viable request.
5327 if(canMakeOtherLost)
// Sentinel costs so any real candidate compares cheaper.
5331 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5332 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5334 VmaAllocationRequest tmpAllocRequest = {};
5335 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5336 suballocIt != m_Suballocations.end();
5339 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5340 suballocIt->hAllocation->CanBecomeLost())
5345 bufferImageGranularity,
5351 &tmpAllocRequest.offset,
5352 &tmpAllocRequest.itemsToMakeLostCount,
5353 &tmpAllocRequest.sumFreeSize,
5354 &tmpAllocRequest.sumItemSize))
5356 tmpAllocRequest.item = suballocIt;
// Keep the cheaper of the current best and this candidate.
5358 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5360 *pAllocationRequest = tmpAllocRequest;
// Success if any eviction candidate was recorded (sentinel overwritten).
5366 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
5375 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5376 uint32_t currentFrameIndex,
5377 uint32_t frameInUseCount,
5378 VmaAllocationRequest* pAllocationRequest)
5380 while(pAllocationRequest->itemsToMakeLostCount > 0)
5382 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5384 ++pAllocationRequest->item;
5386 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5387 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5388 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5389 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5391 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5392 --pAllocationRequest->itemsToMakeLostCount;
5400 VMA_HEAVY_ASSERT(Validate());
5401 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5402 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5407 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5409 uint32_t lostAllocationCount = 0;
5410 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5411 it != m_Suballocations.end();
5414 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5415 it->hAllocation->CanBecomeLost() &&
5416 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5418 it = FreeSuballocation(it);
5419 ++lostAllocationCount;
5422 return lostAllocationCount;
// Commits a previously validated allocation request: converts the chosen free
// suballocation into a used one of exactly allocSize at request.offset, and
// splits off the leftover space before (paddingBegin) and after (paddingEnd)
// as new free suballocations. Updates m_FreeCount and m_SumFreeSize.
// NOTE(review): the `++next;` advance before the paddingEnd insert (original
// 5458) and the conditional free-count increments for the padding nodes
// (original 5479-5485) are missing from this extraction.
5425 void VmaBlockMetadata::Alloc(
5426 const VmaAllocationRequest& request,
5427 VmaSuballocationType type,
5428 VkDeviceSize allocSize,
5429 VmaAllocation hAllocation)
5431 VMA_ASSERT(request.item != m_Suballocations.end());
5432 VmaSuballocation& suballoc = *request.item;
// The target must be a free suballocation large enough to hold the request
// plus any leading padding produced by alignment.
5434 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5436 VMA_ASSERT(request.offset >= suballoc.offset);
5437 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5438 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5439 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the by-size free list before mutating size/type, since the
// vector is keyed by size.
5443 UnregisterFreeSuballocation(request.item);
// Shrink the node to exactly the allocated range and mark it used.
5445 suballoc.offset = request.offset;
5446 suballoc.size = allocSize;
5447 suballoc.type = type;
5448 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after this node.
5453 VmaSuballocation paddingSuballoc = {};
5454 paddingSuballoc.offset = request.offset + allocSize;
5455 paddingSuballoc.size = paddingEnd;
5456 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5457 VmaSuballocationList::iterator next = request.item;
5459 const VmaSuballocationList::iterator paddingEndItem =
5460 m_Suballocations.insert(next, paddingSuballoc);
5461 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before this node.
5467 VmaSuballocation paddingSuballoc = {};
5468 paddingSuballoc.offset = request.offset - paddingBegin;
5469 paddingSuballoc.size = paddingBegin;
5470 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5471 const VmaSuballocationList::iterator paddingBeginItem =
5472 m_Suballocations.insert(request.item, paddingSuballoc);
5473 RegisterFreeSuballocation(paddingBeginItem);
// One free node was consumed; padding nodes (if any) add back to the count
// in the missing lines that follow.
5477 m_FreeCount = m_FreeCount - 1;
5478 if(paddingBegin > 0)
5486 m_SumFreeSize -= allocSize;
5489 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5491 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5492 suballocItem != m_Suballocations.end();
5495 VmaSuballocation& suballoc = *suballocItem;
5496 if(suballoc.hAllocation == allocation)
5498 FreeSuballocation(suballocItem);
5499 VMA_HEAVY_ASSERT(Validate());
5503 VMA_ASSERT(0 &&
"Not found!");
5506 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5508 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5509 suballocItem != m_Suballocations.end();
5512 VmaSuballocation& suballoc = *suballocItem;
5513 if(suballoc.offset == offset)
5515 FreeSuballocation(suballocItem);
5519 VMA_ASSERT(0 &&
"Not found!");
5522 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5524 VkDeviceSize lastSize = 0;
5525 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5527 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5529 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5534 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5539 if(it->size < lastSize)
5545 lastSize = it->size;
// Core placement check: given a starting suballocation, determines whether an
// allocation of allocSize/allocAlignment/allocType can be placed there, and
// computes the final *pOffset after applying debug margin, alignment, and
// bufferImageGranularity adjustments. Two major branches:
//  - canMakeOtherLost == true: the starting item may be used; the check walks
//    forward accumulating free space (*pSumFreeSize) and lost-able allocation
//    sizes (*pSumItemSize, *itemsToMakeLostCount) until the request fits, and
//    also verifies granularity conflicts with preceding/following neighbors
//    (following conflicts may require losing more allocations).
//  - canMakeOtherLost == false: the starting item must itself be a free
//    suballocation large enough for the aligned request.
// Returns true when placement is possible. Many `{ return false; }` bodies
// and some closing braces are missing from this extraction (numbering gaps).
5550 bool VmaBlockMetadata::CheckAllocation(
5551 uint32_t currentFrameIndex,
5552 uint32_t frameInUseCount,
5553 VkDeviceSize bufferImageGranularity,
5554 VkDeviceSize allocSize,
5555 VkDeviceSize allocAlignment,
5556 VmaSuballocationType allocType,
5557 VmaSuballocationList::const_iterator suballocItem,
5558 bool canMakeOtherLost,
5559 VkDeviceSize* pOffset,
5560 size_t* itemsToMakeLostCount,
5561 VkDeviceSize* pSumFreeSize,
5562 VkDeviceSize* pSumItemSize)
const 5564 VMA_ASSERT(allocSize > 0);
5565 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5566 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5567 VMA_ASSERT(pOffset != VMA_NULL);
5569 *itemsToMakeLostCount = 0;
// ---- Branch 1: placement may evict existing lost-able allocations. ----
5573 if(canMakeOtherLost)
5575 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5577 *pSumFreeSize = suballocItem->size;
// Starting item is used: it must itself be lost-able and old enough.
5581 if(suballocItem->hAllocation->CanBecomeLost() &&
5582 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5584 ++*itemsToMakeLostCount;
5585 *pSumItemSize = suballocItem->size;
// Remainder of the block from this offset must be able to hold the request.
5594 if(m_Size - suballocItem->offset < allocSize)
// Start of candidate range, then apply debug margin and alignment.
5600 *pOffset = suballocItem->offset;
5603 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5605 *pOffset += VMA_DEBUG_MARGIN;
5609 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5610 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a conflicting-type neighbor before us shares a
// granularity page, bump the offset up to the next granularity boundary.
5614 if(bufferImageGranularity > 1)
5616 bool bufferImageGranularityConflict =
false;
5617 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5618 while(prevSuballocItem != m_Suballocations.cbegin())
5621 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5622 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5624 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5626 bufferImageGranularityConflict =
true;
5634 if(bufferImageGranularityConflict)
5636 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed the offset past the end of the starting suballocation.
5642 if(*pOffset >= suballocItem->offset + suballocItem->size)
5648 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// End margin is required only when something follows the allocation.
5651 VmaSuballocationList::const_iterator next = suballocItem;
5653 const VkDeviceSize requiredEndMargin =
5654 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5656 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Total required span must fit inside the block.
5658 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over following suballocations until enough space (free or
// lost-able) is accumulated to cover totalSize.
5665 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5666 if(totalSize > suballocItem->size)
5668 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5669 while(remainingSize > 0)
5672 if(lastSuballocItem == m_Suballocations.cend())
5676 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5678 *pSumFreeSize += lastSuballocItem->size;
// A used suballocation in the way must be lost-able and stale.
5682 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5683 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5684 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5686 ++*itemsToMakeLostCount;
5687 *pSumItemSize += lastSuballocItem->size;
5694 remainingSize = (lastSuballocItem->size < remainingSize) ?
5695 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: conflicting neighbors
// on the same page must also be lost-able (they will be made lost too).
5701 if(bufferImageGranularity > 1)
5703 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5705 while(nextSuballocItem != m_Suballocations.cend())
5707 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5708 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5710 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5712 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5713 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5714 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5716 ++*itemsToMakeLostCount;
// ---- Branch 2: no eviction allowed; item must be a free suballocation. ----
5735 const VmaSuballocation& suballoc = *suballocItem;
5736 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5738 *pSumFreeSize = suballoc.size;
// Early reject: range itself is too small.
5741 if(suballoc.size < allocSize)
5747 *pOffset = suballoc.offset;
5750 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5752 *pOffset += VMA_DEBUG_MARGIN;
5756 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5757 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same preceding-neighbor granularity adjustment as in branch 1.
5761 if(bufferImageGranularity > 1)
5763 bool bufferImageGranularityConflict =
false;
5764 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5765 while(prevSuballocItem != m_Suballocations.cbegin())
5768 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5769 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5771 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5773 bufferImageGranularityConflict =
true;
5781 if(bufferImageGranularityConflict)
5783 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5788 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5791 VmaSuballocationList::const_iterator next = suballocItem;
5793 const VkDeviceSize requiredEndMargin =
5794 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free range.
5797 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Any granularity conflict with a following neighbor is fatal here, since
// nothing can be made lost in this branch.
5804 if(bufferImageGranularity > 1)
5806 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5808 while(nextSuballocItem != m_Suballocations.cend())
5810 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5811 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5813 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5832 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5834 VMA_ASSERT(item != m_Suballocations.end());
5835 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5837 VmaSuballocationList::iterator nextItem = item;
5839 VMA_ASSERT(nextItem != m_Suballocations.end());
5840 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5842 item->size += nextItem->size;
5844 m_Suballocations.erase(nextItem);
5847 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5850 VmaSuballocation& suballoc = *suballocItem;
5851 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5852 suballoc.hAllocation = VK_NULL_HANDLE;
5856 m_SumFreeSize += suballoc.size;
5859 bool mergeWithNext =
false;
5860 bool mergeWithPrev =
false;
5862 VmaSuballocationList::iterator nextItem = suballocItem;
5864 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5866 mergeWithNext =
true;
5869 VmaSuballocationList::iterator prevItem = suballocItem;
5870 if(suballocItem != m_Suballocations.begin())
5873 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5875 mergeWithPrev =
true;
5881 UnregisterFreeSuballocation(nextItem);
5882 MergeFreeWithNext(suballocItem);
5887 UnregisterFreeSuballocation(prevItem);
5888 MergeFreeWithNext(prevItem);
5889 RegisterFreeSuballocation(prevItem);
5894 RegisterFreeSuballocation(suballocItem);
5895 return suballocItem;
5899 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5901 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5902 VMA_ASSERT(item->size > 0);
5906 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5908 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5910 if(m_FreeSuballocationsBySize.empty())
5912 m_FreeSuballocationsBySize.push_back(item);
5916 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5924 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5926 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5927 VMA_ASSERT(item->size > 0);
5931 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5933 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5935 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5936 m_FreeSuballocationsBySize.data(),
5937 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5939 VmaSuballocationItemSizeLess());
5940 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5941 index < m_FreeSuballocationsBySize.size();
5944 if(m_FreeSuballocationsBySize[index] == item)
5946 VmaVectorRemove(m_FreeSuballocationsBySize, index);
5949 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5951 VMA_ASSERT(0 &&
"Not found.");
5960 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5962 m_pMappedData(VMA_NULL)
5966 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5968 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory. Under the allocator's mutex:
// if the memory is already mapped, just bump m_MapCount by `count` and return
// the cached pointer; otherwise call vkMapMemory through the allocator's
// function table and cache the result in m_pMappedData. *ppData (optional)
// receives the mapped pointer.
// NOTE(review): the early-out for count == 0, the vkMapMemory argument lines
// (offset/size/flags/&m_pMappedData), the `m_MapCount = count;` on success,
// and the return statements are missing from this extraction — confirm the
// exact flow against the full source.
5971 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
5978 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse the cached pointer, just increase the refcount.
5981 m_MapCount += count;
5982 VMA_ASSERT(m_pMappedData != VMA_NULL);
5983 if(ppData != VMA_NULL)
5985 *ppData = m_pMappedData;
// Not mapped yet: perform the actual vkMapMemory call.
5991 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5992 hAllocator->m_hDevice,
5998 if(result == VK_SUCCESS)
6000 if(ppData != VMA_NULL)
6002 *ppData = m_pMappedData;
6010 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
6017 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6018 if(m_MapCount >= count)
6020 m_MapCount -= count;
6023 m_pMappedData = VMA_NULL;
6024 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
6029 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
6036 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
6037 m_MemoryTypeIndex(UINT32_MAX),
6038 m_hMemory(VK_NULL_HANDLE),
6039 m_Metadata(hAllocator)
6043 void VmaDeviceMemoryBlock::Init(
6044 uint32_t newMemoryTypeIndex,
6045 VkDeviceMemory newMemory,
6046 VkDeviceSize newSize)
6048 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6050 m_MemoryTypeIndex = newMemoryTypeIndex;
6051 m_hMemory = newMemory;
6053 m_Metadata.Init(newSize);
6056 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
6060 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6062 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6063 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6064 m_hMemory = VK_NULL_HANDLE;
6067 bool VmaDeviceMemoryBlock::Validate()
const 6069 if((m_hMemory == VK_NULL_HANDLE) ||
6070 (m_Metadata.GetSize() == 0))
6075 return m_Metadata.Validate();
6078 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
6080 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
6083 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
6085 m_Mapping.Unmap(hAllocator, m_hMemory, count);
6090 memset(&outInfo, 0,
sizeof(outInfo));
6109 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom pool constructor: forwards the pool creation parameters (memory
// type, fixed block size, min/max block counts, frame-in-use count) to the
// embedded VmaBlockVector. The member-initializer wrapper lines around these
// arguments are missing from this extraction.
6117 VmaPool_T::VmaPool_T(
6118 VmaAllocator hAllocator,
6122 createInfo.memoryTypeIndex,
6123 createInfo.blockSize,
6124 createInfo.minBlockCount,
6125 createInfo.maxBlockCount,
6127 createInfo.frameInUseCount,
// Destructor. NOTE(review): the body is not visible in this extraction —
// presumably tears down the pool's block vector; confirm against the full source.
6132 VmaPool_T::~VmaPool_T()
6136 #if VMA_STATS_STRING_ENABLED 6138 #endif // #if VMA_STATS_STRING_ENABLED 6140 VmaBlockVector::VmaBlockVector(
6141 VmaAllocator hAllocator,
6142 uint32_t memoryTypeIndex,
6143 VkDeviceSize preferredBlockSize,
6144 size_t minBlockCount,
6145 size_t maxBlockCount,
6146 VkDeviceSize bufferImageGranularity,
6147 uint32_t frameInUseCount,
6148 bool isCustomPool) :
6149 m_hAllocator(hAllocator),
6150 m_MemoryTypeIndex(memoryTypeIndex),
6151 m_PreferredBlockSize(preferredBlockSize),
6152 m_MinBlockCount(minBlockCount),
6153 m_MaxBlockCount(maxBlockCount),
6154 m_BufferImageGranularity(bufferImageGranularity),
6155 m_FrameInUseCount(frameInUseCount),
6156 m_IsCustomPool(isCustomPool),
6157 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6158 m_HasEmptyBlock(false),
6159 m_pDefragmentator(VMA_NULL)
6163 VmaBlockVector::~VmaBlockVector()
6165 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6167 for(
size_t i = m_Blocks.size(); i--; )
6169 m_Blocks[i]->Destroy(m_hAllocator);
6170 vma_delete(m_hAllocator, m_Blocks[i]);
6174 VkResult VmaBlockVector::CreateMinBlocks()
6176 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6178 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6179 if(res != VK_SUCCESS)
// Collects pool statistics by accumulating every block's metadata into
// *pStats under the block-vector mutex.
// NOTE(review): the zero-initialization of pStats' fields (original lines
// 6189-6193) is missing from this extraction — confirm against the full source.
6187 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6195 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6197 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6199 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6201 VMA_HEAVY_ASSERT(pBlock->Validate());
6202 pBlock->m_Metadata.AddPoolStats(*pStats);
// Maximum number of retries in VmaBlockVector::Allocate when allocations may
// be made lost (each retry recomputes the cheapest eviction candidate).
6206 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector using a three-stage strategy, all under
// the vector mutex:
//  1. Try every existing block without evicting anything.
//  2. If allowed, create a new block (for default pools, progressively
//     halving the preferred size to probe for available device memory).
//  3. If the caller allows making other allocations lost, repeatedly pick the
//     cheapest eviction candidate across all blocks (up to
//     VMA_ALLOCATION_TRY_COUNT retries, since eviction can race with reuse).
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits, or
// VK_ERROR_TOO_MANY_OBJECTS when the retry budget is exhausted.
// NOTE(review): several call-argument lines, the persistent-map condition
// around the Map() calls, and the VK_SUCCESS returns are missing from this
// extraction (numbering gaps) — confirm exact flow against the full source.
6208 VkResult VmaBlockVector::Allocate(
6209 VmaPool hCurrentPool,
6210 uint32_t currentFrameIndex,
6211 const VkMemoryRequirements& vkMemReq,
6213 VmaSuballocationType suballocType,
6214 VmaAllocation* pAllocation)
6219 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- Stage 1: search existing blocks, no eviction allowed. ---
6223 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6225 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6226 VMA_ASSERT(pCurrBlock);
6227 VmaAllocationRequest currRequest = {};
6228 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6231 m_BufferImageGranularity,
// canMakeOtherLost was false here, so nothing may need to become lost.
6239 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
6243 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6244 if(res != VK_SUCCESS)
// The block is about to gain an allocation, so it stops being empty.
6251 if(pCurrBlock->m_Metadata.IsEmpty())
6253 m_HasEmptyBlock =
false;
6256 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6257 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6258 (*pAllocation)->InitBlockAllocation(
6267 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6268 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6269 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Stage 2: create a new block if the block-count limit permits. ---
6274 const bool canCreateNewBlock =
6276 (m_Blocks.size() < m_MaxBlockCount);
6279 if(canCreateNewBlock)
6282 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6283 uint32_t newBlockSizeShift = 0;
6284 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start small: halve the preferred size while it still exceeds
// every existing block and comfortably fits the request.
6288 if(m_IsCustomPool ==
false)
6291 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6292 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6294 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6295 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6297 newBlockSize = smallerNewBlockSize;
6298 ++newBlockSizeShift;
6307 size_t newBlockIndex = 0;
6308 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, default pools retry with progressively smaller blocks.
6310 if(m_IsCustomPool ==
false)
6312 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6314 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6315 if(smallerNewBlockSize >= vkMemReq.size)
6317 newBlockSize = smallerNewBlockSize;
6318 ++newBlockSizeShift;
6319 res = CreateBlock(newBlockSize, &newBlockIndex);
6328 if(res == VK_SUCCESS)
6330 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6331 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6335 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6336 if(res != VK_SUCCESS)
// The new block is empty, so the trivial whole-block request always succeeds.
6343 VmaAllocationRequest allocRequest;
6344 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6345 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6346 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6347 (*pAllocation)->InitBlockAllocation(
6350 allocRequest.offset,
6356 VMA_HEAVY_ASSERT(pBlock->Validate());
6357 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6358 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Stage 3: evict lost-able allocations to make room. ---
6366 if(canMakeOtherLost)
6368 uint32_t tryIndex = 0;
6369 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6371 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6372 VmaAllocationRequest bestRequest = {};
6373 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find the cheapest eviction candidate across all blocks.
6377 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6379 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6380 VMA_ASSERT(pCurrBlock);
6381 VmaAllocationRequest currRequest = {};
6382 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6385 m_BufferImageGranularity,
6392 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6393 if(pBestRequestBlock == VMA_NULL ||
6394 currRequestCost < bestRequestCost)
6396 pBestRequestBlock = pCurrBlock;
6397 bestRequest = currRequest;
6398 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — cannot do better; stop searching.
6400 if(bestRequestCost == 0)
6408 if(pBestRequestBlock != VMA_NULL)
6412 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6413 if(res != VK_SUCCESS)
// Eviction may fail (e.g. an allocation became in-use again) — then retry
// the whole candidate search.
6419 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6425 if(pBestRequestBlock->m_Metadata.IsEmpty())
6427 m_HasEmptyBlock =
false;
6430 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6431 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6432 (*pAllocation)->InitBlockAllocation(
6441 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6442 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6443 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Retry budget exhausted — give up rather than loop forever.
6457 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6459 return VK_ERROR_TOO_MANY_OBJECTS;
6463 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block. Under the vector mutex: unmaps the
// persistent mapping if the allocation held one, frees the suballocation,
// and applies the "keep at most one empty block" policy — the just-emptied
// block (or the trailing empty block) is scheduled for deletion when another
// empty block already exists and the count stays above m_MinBlockCount.
// The actual VkDeviceMemory release happens after the mutex is dropped.
// NOTE(review): scope braces around the locked section are among the lines
// missing from this extraction.
6466 void VmaBlockVector::Free(
6467 VmaAllocation hAllocation)
6469 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6473 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6475 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Drop the persistent-map reference held on behalf of this allocation.
6477 if(hAllocation->IsPersistentMap())
6479 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6482 pBlock->m_Metadata.Free(hAllocation);
6483 VMA_HEAVY_ASSERT(pBlock->Validate());
6485 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Empty-block policy: keep at most one empty block around as a cache.
6488 if(pBlock->m_Metadata.IsEmpty())
6491 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6493 pBlockToDelete = pBlock;
6499 m_HasEmptyBlock =
true;
// This block did not become empty, but an older empty block may now be
// deletable (it sits at the back thanks to incremental sorting).
6504 else if(m_HasEmptyBlock)
6506 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6507 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6509 pBlockToDelete = pLastBlock;
6510 m_Blocks.pop_back();
6511 m_HasEmptyBlock =
false;
6515 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory is deliberately done outside the lock.
6520 if(pBlockToDelete != VMA_NULL)
6522 VMA_DEBUG_LOG(
" Deleted empty allocation");
6523 pBlockToDelete->Destroy(m_hAllocator);
6524 vma_delete(m_hAllocator, pBlockToDelete);
6528 size_t VmaBlockVector::CalcMaxBlockSize()
const 6531 for(
size_t i = m_Blocks.size(); i--; )
6533 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6534 if(result >= m_PreferredBlockSize)
6542 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6544 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6546 if(m_Blocks[blockIndex] == pBlock)
6548 VmaVectorRemove(m_Blocks, blockIndex);
6555 void VmaBlockVector::IncrementallySortBlocks()
6558 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6560 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6562 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
6568 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6570 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6571 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6572 allocInfo.allocationSize = blockSize;
6573 VkDeviceMemory mem = VK_NULL_HANDLE;
6574 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6583 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6587 allocInfo.allocationSize);
6589 m_Blocks.push_back(pBlock);
6590 if(pNewBlockIndex != VMA_NULL)
6592 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON under the vector mutex. Custom pools
// emit their configuration (MemoryTypeIndex, BlockSize, min/max/current
// BlockCount, FrameInUseCount); default pools emit only PreferredBlockSize.
// Then every block's own detailed map is written under "Blocks".
// Compiled only when stats strings are enabled. The custom/default branch
// condition lines are missing from this extraction.
6598 #if VMA_STATS_STRING_ENABLED 6600 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6602 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool configuration section.
6608 json.WriteString(
"MemoryTypeIndex");
6609 json.WriteNumber(m_MemoryTypeIndex);
6611 json.WriteString(
"BlockSize");
6612 json.WriteNumber(m_PreferredBlockSize);
6614 json.WriteString(
"BlockCount");
6615 json.BeginObject(
true);
6616 if(m_MinBlockCount > 0)
6618 json.WriteString(
"Min");
6619 json.WriteNumber((uint64_t)m_MinBlockCount);
6621 if(m_MaxBlockCount < SIZE_MAX)
6623 json.WriteString(
"Max");
6624 json.WriteNumber((uint64_t)m_MaxBlockCount);
6626 json.WriteString(
"Cur");
6627 json.WriteNumber((uint64_t)m_Blocks.size());
6630 if(m_FrameInUseCount > 0)
6632 json.WriteString(
"FrameInUseCount");
6633 json.WriteNumber(m_FrameInUseCount);
// Default-pool section: only the preferred block size.
6638 json.WriteString(
"PreferredBlockSize");
6639 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps.
6642 json.WriteString(
"Blocks");
6644 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6646 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
6653 #endif // #if VMA_STATS_STRING_ENABLED 6655 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6656 VmaAllocator hAllocator,
6657 uint32_t currentFrameIndex)
6659 if(m_pDefragmentator == VMA_NULL)
6661 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6667 return m_pDefragmentator;
// Runs the defragmentation pass under the vector mutex: delegates moving to
// m_pDefragmentator, accumulates moved-bytes/moved-allocations into
// pDefragmentationStats, subtracts them from the caller's remaining budgets,
// then destroys blocks that became empty (down to m_MinBlockCount), counting
// their sizes as freed. NOTE(review): the early return when no defragmentator
// exists, the budget-subtraction lines, and the final return are missing from
// this extraction — confirm against the full source.
6670 VkResult VmaBlockVector::Defragment(
6672 VkDeviceSize& maxBytesToMove,
6673 uint32_t& maxAllocationsToMove)
6675 if(m_pDefragmentator == VMA_NULL)
6680 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Perform the actual moves within the given budgets.
6683 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report statistics and shrink the caller's remaining budgets.
6686 if(pDefragmentationStats != VMA_NULL)
6688 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6689 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6692 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6693 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks that defragmentation produced, back-to-front so removal
// by index stays valid; keep at least m_MinBlockCount blocks and remember
// whether one empty block remains.
6699 m_HasEmptyBlock =
false;
6700 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6702 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6703 if(pBlock->m_Metadata.IsEmpty())
6705 if(m_Blocks.size() > m_MinBlockCount)
6707 if(pDefragmentationStats != VMA_NULL)
6710 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6713 VmaVectorRemove(m_Blocks, blockIndex);
6714 pBlock->Destroy(m_hAllocator);
6715 vma_delete(m_hAllocator, pBlock);
6719 m_HasEmptyBlock =
true;
6727 void VmaBlockVector::DestroyDefragmentator()
6729 if(m_pDefragmentator != VMA_NULL)
6731 vma_delete(m_hAllocator, m_pDefragmentator);
6732 m_pDefragmentator = VMA_NULL;
// Marks eligible allocations in every block of this vector as "lost" for the
// given frame index (taking m_FrameInUseCount into account) and optionally
// reports the total number of allocations lost.
6736 void VmaBlockVector::MakePoolAllocationsLost(
6737 uint32_t currentFrameIndex,
6738 size_t* pLostAllocationCount)
6740 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6741 size_t lostAllocationCount = 0;
6742 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6744 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6746 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out-parameter is optional; only written when the caller asked for it.
6748 if(pLostAllocationCount != VMA_NULL)
6750 *pLostAllocationCount = lostAllocationCount;
// Accumulates per-block allocation statistics from this vector into pStats:
// into the grand total, the entry for this memory type, and the entry for the
// memory heap that type belongs to.
// NOTE(review): lossy extraction — the declaration of allocationStatInfo
// (likely line 6766 of the original) is missing from this view.
6754 void VmaBlockVector::AddStats(
VmaStats* pStats)
6756 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6757 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6759 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6761 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6763 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6765 VMA_HEAVY_ASSERT(pBlock->Validate());
6767 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6768 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6769 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6770 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to an allocator and a block vector,
// records the current frame index, and initializes the move counters and the
// internal allocation/block lists with the allocator's callbacks.
// NOTE(review): lossy extraction — the m_BytesMoved(0) initializer (original
// line 6784) appears to be missing from this view.
6777 VmaDefragmentator::VmaDefragmentator(
6778 VmaAllocator hAllocator,
6779 VmaBlockVector* pBlockVector,
6780 uint32_t currentFrameIndex) :
6781 m_hAllocator(hAllocator),
6782 m_pBlockVector(pBlockVector),
6783 m_CurrentFrameIndex(currentFrameIndex),
6785 m_AllocationsMoved(0),
6786 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6787 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees the per-block BlockInfo objects owned by m_Blocks
// (iterating backwards; the vector itself is cleaned up by its own dtor).
6791 VmaDefragmentator::~VmaDefragmentator()
6793 for(
size_t i = m_Blocks.size(); i--; )
6795 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a candidate for defragmentation. pChanged is an
// optional out-flag that will be set to VK_TRUE if the allocation is moved.
6799 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6801 AllocationInfo allocInfo;
6802 allocInfo.m_hAllocation = hAlloc;
6803 allocInfo.m_pChanged = pChanged;
6804 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to this block's memory, mapping it if necessary.
// Reuses (in order of preference): a mapping already made for defragmentation,
// then any persistent mapping the block already has, and only then performs a
// fresh Map() whose pointer is remembered for later Unmap().
// NOTE(review): lossy extraction — the VK_SUCCESS returns after the two
// early-out branches are missing from this view.
6807 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6810 if(m_pMappedDataForDefragmentation)
6812 *ppMappedData = m_pMappedDataForDefragmentation;
6817 if(m_pBlock->m_Mapping.GetMappedData())
6819 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6824 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6825 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the mapping made by EnsureMapping(), but only if it was this
// defragmentator that created it (persistent block mappings are left alone).
6829 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6831 if(m_pMappedDataForDefragmentation != VMA_NULL)
6833 m_pBlock->Unmap(hAllocator, 1);
// One pass of defragmentation: walks source allocations from the last block
// backwards and tries to relocate each into an earlier block (or an earlier
// offset) found via CreateAllocationRequest + MoveMakesSense. Returns
// VK_INCOMPLETE when either caller-supplied budget (bytes or allocation
// count) would be exceeded. Data is moved with a CPU-side memcpy between the
// mapped source and destination blocks.
// NOTE(review): lossy extraction — braces, the outer while-loop header, the
// memcpy call line itself, and several control-flow lines are missing here.
6837 VkResult VmaDefragmentator::DefragmentRound(
6838 VkDeviceSize maxBytesToMove,
6839 uint32_t maxAllocationsToMove)
6841 if(m_Blocks.empty())
// Cursor over source allocations, starting at the very last allocation of
// the very last block. srcAllocIndex == SIZE_MAX means "not yet positioned".
6846 size_t srcBlockIndex = m_Blocks.size() - 1;
6847 size_t srcAllocIndex = SIZE_MAX;
6853 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6855 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6858 if(srcBlockIndex == 0)
6865 srcAllocIndex = SIZE_MAX;
6870 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6874 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6875 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6877 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6878 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6879 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6880 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source
// block itself; MoveMakesSense rejects moves that would not compact.
6883 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6885 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6886 VmaAllocationRequest dstAllocRequest;
6887 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6888 m_CurrentFrameIndex,
6889 m_pBlockVector->GetFrameInUseCount(),
6890 m_pBlockVector->GetBufferImageGranularity(),
6895 &dstAllocRequest) &&
6897 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6899 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
6902 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6903 (m_BytesMoved + size > maxBytesToMove))
6905 return VK_INCOMPLETE;
6908 void* pDstMappedData = VMA_NULL;
6909 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6910 if(res != VK_SUCCESS)
6915 void* pSrcMappedData = VMA_NULL;
6916 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6917 if(res != VK_SUCCESS)
// Copy payload bytes, then update metadata: allocate at destination,
// free at source, and repoint the allocation at its new block/offset.
6924 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6925 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6926 static_cast<size_t>(size));
6928 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6929 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6931 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6933 if(allocInfo.m_pChanged != VMA_NULL)
6935 *allocInfo.m_pChanged = VK_TRUE;
6938 ++m_AllocationsMoved;
6939 m_BytesMoved += size;
6941 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor to the previous allocation / previous block.
6949 if(srcAllocIndex > 0)
6955 if(srcBlockIndex > 0)
6958 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds BlockInfo entries for every block
// in the vector, distributes the registered allocations to their owning
// blocks (binary search over blocks sorted by pointer), sorts blocks into
// preferred destination order, then executes up to two DefragmentRound passes
// within the given byte/count budgets. Finally unmaps any blocks that were
// mapped only for defragmentation.
// NOTE(review): lossy extraction — braces and some statements (e.g. the
// early-return for an empty allocation list) are missing from this view.
6968 VkResult VmaDefragmentator::Defragment(
6969 VkDeviceSize maxBytesToMove,
6970 uint32_t maxAllocationsToMove)
6972 if(m_Allocations.empty())
// Create one BlockInfo per device memory block.
6978 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6979 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6981 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6982 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6983 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be assigned via binary search.
6987 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6990 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6992 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are skipped — nothing to move.
6994 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6996 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6997 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6998 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7000 (*it)->m_Allocations.push_back(allocInfo);
7008 m_Allocations.clear();
7010 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7012 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7013 pBlockInfo->CalcHasNonMovableAllocations();
7014 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so preferred move destinations come first.
7018 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
7021 VkResult result = VK_SUCCESS;
7022 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7024 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7028 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7030 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Decides whether moving an allocation from (srcBlockIndex, srcOffset) to
// (dstBlockIndex, dstOffset) actually compacts memory: a move to an earlier
// block always makes sense, to a later block never, and within the same
// block only when the offset decreases.
// NOTE(review): lossy extraction — the return statements of each branch are
// missing from this view.
7036 bool VmaDefragmentator::MoveMakesSense(
7037 size_t dstBlockIndex, VkDeviceSize dstOffset,
7038 size_t srcBlockIndex, VkDeviceSize srcOffset)
7040 if(dstBlockIndex < srcBlockIndex)
7044 if(dstBlockIndex > srcBlockIndex)
7048 if(dstOffset < srcOffset)
// Constructor of VmaAllocator_T (the allocator implementation object).
// NOTE(review): lossy extraction — the constructor signature line and several
// initializers/statements are missing from this view; what remains is the
// member-initializer list plus the body that zeroes internal state, applies
// per-heap size limits, and creates one block vector and one dedicated-
// allocation list per memory type.
7061 m_hDevice(pCreateInfo->device),
7062 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7063 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7064 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7065 m_PreferredLargeHeapBlockSize(0),
7066 m_PhysicalDevice(pCreateInfo->physicalDevice),
7067 m_CurrentFrameIndex(0),
7068 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all cached device/memory state before querying it from Vulkan.
7072 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7073 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7074 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7076 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7077 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
7079 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7081 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7092 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7093 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply optional user-provided per-heap size limits, clamping the reported
// heap sizes so budget accounting uses the limit instead.
7100 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7102 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7103 if(limit != VK_WHOLE_SIZE)
7105 m_HeapSizeLimit[heapIndex] = limit;
7106 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7108 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + one dedicated-allocation vector per memory type.
7114 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7116 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7118 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7124 GetBufferImageGranularity(),
7129 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: asserts all pools were destroyed by the user, then tears down
// the per-memory-type dedicated-allocation lists and block vectors in
// reverse order.
7133 VmaAllocator_T::~VmaAllocator_T()
7135 VMA_ASSERT(m_Pools.empty());
7137 for(
size_t i = GetMemoryTypeCount(); i--; )
7139 vma_delete(
this, m_pDedicatedAllocations[i]);
7140 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: when statically linked against Vulkan, takes the
// addresses of the global entry points (and fetches the KHR dedicated-
// allocation pair via vkGetDeviceProcAddr); then overrides any entry the
// user supplied in pVulkanFunctions; finally asserts every required pointer
// is non-null.
7144 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7146 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7147 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7148 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7149 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7150 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7151 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7152 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7153 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7154 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7155 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7156 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7157 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7158 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7159 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7160 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR extension entry points are not exported statically; resolve them at
// runtime only when the extension is in use.
7161 if(m_UseKhrDedicatedAllocation)
7163 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7164 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7165 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7166 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
7168 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7170 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7171 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7173 if(pVulkanFunctions != VMA_NULL)
7175 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7176 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7177 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7178 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7179 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7180 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7181 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7182 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7183 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7184 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7185 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7186 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7187 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7188 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7189 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7190 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Validate the final function table: every core entry must be resolved.
7193 #undef VMA_COPY_IF_NOT_NULL 7197 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7198 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7199 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7200 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7201 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7202 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7203 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7204 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7205 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7206 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7207 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7208 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7209 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7210 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7211 if(m_UseKhrDedicatedAllocation)
7213 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7214 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Returns the preferred size for new memory blocks of the given memory type:
// 1/8 of the heap for "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise
// the configured m_PreferredLargeHeapBlockSize.
7218 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7220 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7221 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7222 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7223 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers a dedicated
// allocation when the caller requested it, VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
// is on, or the request is larger than half the preferred block size;
// otherwise sub-allocates from the type's block vector and falls back to a
// dedicated allocation if that fails.
// NOTE(review): lossy extraction — braces, the finalCreateInfo setup, most
// argument lists, and several flag checks are missing from this view.
7226 VkResult VmaAllocator_T::AllocateMemoryOfType(
7227 const VkMemoryRequirements& vkMemReq,
7228 bool dedicatedAllocation,
7229 VkBuffer dedicatedBuffer,
7230 VkImage dedicatedImage,
7232 uint32_t memTypeIndex,
7233 VmaSuballocationType suballocType,
7234 VmaAllocation* pAllocation)
7236 VMA_ASSERT(pAllocation != VMA_NULL);
7237 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Guard against mapping requests on non-HOST_VISIBLE memory types.
7243 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7248 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7249 VMA_ASSERT(blockVector);
7251 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7252 bool preferDedicatedMemory =
7253 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7254 dedicatedAllocation ||
7256 vkMemReq.size > preferredBlockSize / 2;
7258 if(preferDedicatedMemory &&
7260 finalCreateInfo.
pool == VK_NULL_HANDLE)
7269 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7273 return AllocateDedicatedMemory(
// Primary path: sub-allocate from the block vector for this memory type.
7287 VkResult res = blockVector->Allocate(
7289 m_CurrentFrameIndex.load(),
7294 if(res == VK_SUCCESS)
7302 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block-vector allocation failed; try a dedicated allocation.
7306 res = AllocateDedicatedMemory(
7312 finalCreateInfo.pUserData,
7316 if(res == VK_SUCCESS)
7319 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7325 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Performs a dedicated vkAllocateMemory for a single resource: chains
// VkMemoryDedicatedAllocateInfoKHR when the KHR extension is enabled and a
// buffer/image handle was supplied, optionally maps the memory, then creates
// the VmaAllocation_T object and registers it in the sorted per-memory-type
// dedicated-allocations list.
// NOTE(review): lossy extraction — braces, the map-request check around
// vkMapMemory, its full argument list, and some early returns are missing.
7332 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7334 VmaSuballocationType suballocType,
7335 uint32_t memTypeIndex,
7337 bool isUserDataString,
7339 VkBuffer dedicatedBuffer,
7340 VkImage dedicatedImage,
7341 VmaAllocation* pAllocation)
7343 VMA_ASSERT(pAllocation);
7345 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7346 allocInfo.memoryTypeIndex = memTypeIndex;
7347 allocInfo.allocationSize = size;
// VK_KHR_dedicated_allocation: tie the allocation to exactly one resource.
7349 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7350 if(m_UseKhrDedicatedAllocation)
7352 if(dedicatedBuffer != VK_NULL_HANDLE)
7354 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7355 dedicatedAllocInfo.buffer = dedicatedBuffer;
7356 allocInfo.pNext = &dedicatedAllocInfo;
7358 else if(dedicatedImage != VK_NULL_HANDLE)
7360 dedicatedAllocInfo.image = dedicatedImage;
7361 allocInfo.pNext = &dedicatedAllocInfo;
7366 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7367 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7370 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping; on failure the fresh memory is released.
7374 void* pMappedData = VMA_NULL;
7377 res = (*m_VulkanFunctions.vkMapMemory)(
7386 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7387 FreeVulkanMemory(memTypeIndex, size, hMemory);
7392 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7393 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7394 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted dedicated-allocations list for this memory type.
7398 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7399 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7400 VMA_ASSERT(pDedicatedAllocations);
7401 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7404 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core query and reports false for both flags.
7409 void VmaAllocator_T::GetBufferMemoryRequirements(
7411 VkMemoryRequirements& memReq,
7412 bool& requiresDedicatedAllocation,
7413 bool& prefersDedicatedAllocation)
const 7415 if(m_UseKhrDedicatedAllocation)
7417 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7418 memReqInfo.buffer = hBuffer;
7420 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7422 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7423 memReq2.pNext = &memDedicatedReq;
7425 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7427 memReq = memReq2.memoryRequirements;
7428 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7429 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path without the extension: no dedicated-allocation hints.
7433 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7434 requiresDedicatedAllocation =
false;
7435 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR plus VkMemoryDedicatedRequirementsKHR when
// the extension is enabled, otherwise the core query with both dedicated-
// allocation flags reported as false.
7439 void VmaAllocator_T::GetImageMemoryRequirements(
7441 VkMemoryRequirements& memReq,
7442 bool& requiresDedicatedAllocation,
7443 bool& prefersDedicatedAllocation)
const 7445 if(m_UseKhrDedicatedAllocation)
7447 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7448 memReqInfo.image = hImage;
7450 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7452 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7453 memReq2.pNext = &memDedicatedReq;
7455 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7457 memReq = memReq2.memoryRequirements;
7458 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7459 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path without the extension: no dedicated-allocation hints.
7463 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7464 requiresDedicatedAllocation =
false;
7465 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, and for default-pool
// allocations finds a suitable memory type and calls AllocateMemoryOfType,
// retrying with the next acceptable memory type (clearing the failed bit in
// memoryTypeBits) on failure.
// NOTE(review): lossy extraction — braces, the createInfo flag expressions,
// the FindMemoryTypeIndex calls, and several argument lists are missing here.
7469 VkResult VmaAllocator_T::AllocateMemory(
7470 const VkMemoryRequirements& vkMemReq,
7471 bool requiresDedicatedAllocation,
7472 bool prefersDedicatedAllocation,
7473 VkBuffer dedicatedBuffer,
7474 VkImage dedicatedImage,
7476 VmaSuballocationType suballocType,
7477 VmaAllocation* pAllocation)
// Reject contradictory flag combinations up front.
7482 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7483 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7488 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7489 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7491 if(requiresDedicatedAllocation)
7495 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7496 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7498 if(createInfo.
pool != VK_NULL_HANDLE)
7500 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7501 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7504 if((createInfo.
pool != VK_NULL_HANDLE) &&
7507 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7508 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate straight to the pool's block vector.
7511 if(createInfo.
pool != VK_NULL_HANDLE)
7513 return createInfo.
pool->m_BlockVector.Allocate(
7515 m_CurrentFrameIndex.load(),
// Default-pool path: iterate candidate memory types until one succeeds.
7524 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7525 uint32_t memTypeIndex = UINT32_MAX;
7527 if(res == VK_SUCCESS)
7529 res = AllocateMemoryOfType(
7531 requiresDedicatedAllocation || prefersDedicatedAllocation,
7539 if(res == VK_SUCCESS)
// Exclude the failed memory type and try the next best candidate.
7549 memoryTypeBits &= ~(1u << memTypeIndex);
7552 if(res == VK_SUCCESS)
7554 res = AllocateMemoryOfType(
7556 requiresDedicatedAllocation || prefersDedicatedAllocation,
7564 if(res == VK_SUCCESS)
7574 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: unless it is a lost allocation, returns the memory to
// the owning block vector (pool or default per-type vector) for block
// allocations, or releases the VkDeviceMemory for dedicated allocations.
// Then clears user data and destroys the VmaAllocation_T object itself.
7585 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7587 VMA_ASSERT(allocation);
// Lost allocations hold no memory — skip straight to object destruction.
7589 if(allocation->CanBecomeLost() ==
false ||
7590 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7592 switch(allocation->GetType())
7594 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7596 VmaBlockVector* pBlockVector = VMA_NULL;
7597 VmaPool hPool = allocation->GetPool();
7598 if(hPool != VK_NULL_HANDLE)
7600 pBlockVector = &hPool->m_BlockVector;
7604 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7605 pBlockVector = m_pBlockVectors[memTypeIndex];
7607 pBlockVector->Free(allocation);
7610 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7611 FreeDedicatedMemory(allocation);
7618 allocation->SetUserData(
this, VMA_NULL);
7619 vma_delete(
this, allocation);
// Builds a full VmaStats snapshot: initializes all stat entries, accumulates
// stats from every default block vector, every custom pool, and every
// dedicated allocation, then post-processes totals and the per-type/per-heap
// entries (averages etc.).
// NOTE(review): lossy extraction — the InitStatInfo calls inside the first
// two loops and the allocationStatInfo declaration are missing from this view.
7622 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7625 InitStatInfo(pStats->
total);
7626 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7628 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors (one per memory type).
7632 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7634 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7635 VMA_ASSERT(pBlockVector);
7636 pBlockVector->AddStats(pStats);
// Custom pools.
7641 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7642 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7644 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type (under the per-type mutex).
7649 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7651 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7652 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7653 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7654 VMA_ASSERT(pDedicatedAllocVector);
7655 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7658 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7659 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7660 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7661 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/summary fields from the accumulated raw counters.
7666 VmaPostprocessCalcStatInfo(pStats->
total);
7667 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7668 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7669 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7670 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD: 4098 decimal == 0x1002 hex.
7673 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation: zeroes the caller's change flags and stats,
// distributes each movable HOST_VISIBLE block allocation to a defragmentator
// on its owning block vector (pool or default), runs Defragment() on every
// affected vector within the optional byte/count budgets, then destroys all
// defragmentators.
// NOTE(review): lossy extraction — braces, the per-allocation size of the
// memset (allocationCount factor), the pDefragmentationInfo field reads, and
// some loop headers/arguments are missing from this view.
7675 VkResult VmaAllocator_T::Defragment(
7676 VmaAllocation* pAllocations,
7677 size_t allocationCount,
7678 VkBool32* pAllocationsChanged,
7682 if(pAllocationsChanged != VMA_NULL)
7684 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7686 if(pDefragmentationStats != VMA_NULL)
7688 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7691 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7693 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7695 const size_t poolCount = m_Pools.size();
// Phase 1: register each eligible allocation with the defragmentator of its
// owning block vector. Only block allocations in HOST_VISIBLE memory that
// are not lost can be moved (the move is a CPU memcpy).
7698 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7700 VmaAllocation hAlloc = pAllocations[allocIndex];
7702 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7704 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7706 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7708 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7710 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7712 const VmaPool hAllocPool = hAlloc->GetPool();
7714 if(hAllocPool != VK_NULL_HANDLE)
7716 pAllocBlockVector = &hAllocPool->GetBlockVector();
7721 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7724 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7726 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7727 &pAllocationsChanged[allocIndex] : VMA_NULL;
7728 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation. Budgets default to unlimited when no
// VmaDefragmentationInfo was supplied.
7732 VkResult result = VK_SUCCESS;
7736 VkDeviceSize maxBytesToMove = SIZE_MAX;
7737 uint32_t maxAllocationsToMove = UINT32_MAX;
7738 if(pDefragmentationInfo != VMA_NULL)
7745 for(uint32_t memTypeIndex = 0;
7746 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7750 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7752 result = m_pBlockVectors[memTypeIndex]->Defragment(
7753 pDefragmentationStats,
7755 maxAllocationsToMove);
7760 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7762 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7763 pDefragmentationStats,
7765 maxAllocationsToMove);
// Phase 3: tear down all defragmentators (pools first, then default vectors).
7771 for(
size_t poolIndex = poolCount; poolIndex--; )
7773 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7777 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7779 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7781 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for an allocation. For allocations that can become
// lost it uses a compare-exchange loop on the last-use frame index: a lost
// allocation reports null memory/zero offset, an allocation already touched
// this frame reports its real data, otherwise it attempts to bump the
// last-use frame to the current one and retries.
// NOTE(review): lossy extraction — braces, the loop construct around the
// compare-exchange, and the memoryType/deviceMemory/pMappedData writes in the
// "lost" branch are missing from this view.
7788 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7790 if(hAllocation->CanBecomeLost())
7796 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7797 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report size/user data but no live memory binding.
7800 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7804 pAllocationInfo->
offset = 0;
7805 pAllocationInfo->
size = hAllocation->GetSize();
7807 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the live binding.
7810 else if(localLastUseFrameIndex == localCurrFrameIndex)
7812 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7813 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7814 pAllocationInfo->
offset = hAllocation->GetOffset();
7815 pAllocationInfo->
size = hAllocation->GetSize();
7817 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to mark it as used this frame and re-evaluate.
7822 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7824 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: report the binding directly.
7831 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7832 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7833 pAllocationInfo->
offset = hAllocation->GetOffset();
7834 pAllocationInfo->
size = hAllocation->GetSize();
7835 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7836 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame; same compare-exchange
// scheme as GetAllocationInfo but without filling an info struct. Returns
// whether the allocation is still valid (not lost).
// NOTE(review): lossy extraction — braces, the loop construct, and the
// return statements of the branches are missing from this view.
7840 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
7843 if(hAllocation->CanBecomeLost())
7845 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7846 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7849 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7853 else if(localLastUseFrameIndex == localCurrFrameIndex)
7859 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7861 localLastUseFrameIndex = localCurrFrameIndex;
// Creates a custom memory pool: allocates the VmaPool_T object, creates its
// minimum number of blocks (destroying the pool again on failure), and
// registers it in the sorted m_Pools list under the pools mutex.
// NOTE(review): lossy extraction — the newCreateInfo setup/validation lines
// and the return statements are missing from this view.
7872 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7874 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7887 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-create the pool's minimum block count; roll back on failure.
7889 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7890 if(res != VK_SUCCESS)
7892 vma_delete(
this, *pPool);
7899 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7900 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted m_Pools list (asserting
// it was actually registered) and deletes the pool object.
7906 void VmaAllocator_T::DestroyPool(VmaPool pool)
7910 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7911 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7912 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7915 vma_delete(
this, pool);
// Thin forwarder: pool statistics are computed by the pool's block vector.
7918 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7920 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Publishes the application's current frame index (atomic store); used by
// the lost-allocation machinery.
7923 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7925 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector, passing the current frame index so
// allocations unused for long enough can be marked lost.
7928 void VmaAllocator_T::MakePoolAllocationsLost(
7930 size_t* pLostAllocationCount)
7932 hPool->m_BlockVector.MakePoolAllocationsLost(
7933 m_CurrentFrameIndex.load(),
7934 pLostAllocationCount);
// Creates a pre-lost dummy allocation (frame index VMA_FRAME_INDEX_LOST,
// no user-data string) that owns no memory.
7937 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7939 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7940 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with optional per-heap budget enforcement: when the
// heap has a size limit, the allocation is rejected with
// VK_ERROR_OUT_OF_DEVICE_MEMORY if it would exceed the remaining budget, and
// the budget is decremented on success. Fires the user's pfnAllocate
// device-memory callback after a successful allocation.
// NOTE(review): lossy extraction — braces and the declaration of `res` before
// the budget branch are missing from this view.
7943 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7945 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budget-limited path: check and update the remaining heap budget under lock.
7948 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7950 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7951 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7953 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7954 if(res == VK_SUCCESS)
7956 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7961 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited path: allocate directly.
7966 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7969 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7971 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: fires the user's pfnFree callback
// first, frees the VkDeviceMemory, and returns the bytes to the heap budget
// when a per-heap size limit is active.
7977 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7979 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7981 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7984 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Restore the freed bytes to the heap's remaining budget, if limited.
7986 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7987 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7989 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7990 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Allocations that can become lost are
// never mappable. Block allocations map the whole owning block (reference-
// counted) and return the base pointer plus the allocation's offset;
// dedicated allocations delegate to DedicatedAllocMap.
7994 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7996 if(hAllocation->CanBecomeLost())
7998 return VK_ERROR_MEMORY_MAP_FAILED;
8001 switch(hAllocation->GetType())
8003 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8005 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8006 char *pBytes = VMA_NULL;
8007 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8008 if(res == VK_SUCCESS)
// Offset the block base pointer to this allocation's region and bump
// the allocation's own map reference count.
8010 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8011 hAllocation->BlockAllocMap();
8015 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8016 return hAllocation->DedicatedAllocMap(
this, ppData);
8019 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverses Map(): decrements the allocation's map count and unmaps the owning
// block (block allocations) or delegates to DedicatedAllocUnmap (dedicated
// allocations).
8023 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
8025 switch(hAllocation->GetType())
8027 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8029 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8030 hAllocation->BlockAllocUnmap();
8031 pBlock->Unmap(
this, 1);
8034 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8035 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: removes it from the sorted per-memory-type
// registry (under that type's mutex), unmaps its memory if it was mapped,
// and frees the VkDeviceMemory via FreeVulkanMemory (which also updates the
// heap budget).
8042 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
8044 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8046 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8048 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8049 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8050 VMA_ASSERT(pDedicatedAllocations);
8051 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8052 VMA_ASSERT(success);
8055 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistent mapping must be released before freeing the memory object.
8057 if(allocation->GetMappedData() != VMA_NULL)
8059 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8062 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8064 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8067 #if VMA_STATS_STRING_ENABLED 8069 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8071 bool dedicatedAllocationsStarted =
false;
8072 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8074 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8075 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8076 VMA_ASSERT(pDedicatedAllocVector);
8077 if(pDedicatedAllocVector->empty() ==
false)
8079 if(dedicatedAllocationsStarted ==
false)
8081 dedicatedAllocationsStarted =
true;
8082 json.WriteString(
"DedicatedAllocations");
8086 json.BeginString(
"Type ");
8087 json.ContinueString(memTypeIndex);
8092 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8094 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8095 json.BeginObject(
true);
8097 json.WriteString(
"Type");
8098 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8100 json.WriteString(
"Size");
8101 json.WriteNumber(hAlloc->GetSize());
8103 const void* pUserData = hAlloc->GetUserData();
8104 if(pUserData != VMA_NULL)
8106 json.WriteString(
"UserData");
8107 if(hAlloc->IsUserDataString())
8109 json.WriteString((
const char*)pUserData);
8114 json.ContinueString_Pointer(pUserData);
8125 if(dedicatedAllocationsStarted)
8131 bool allocationsStarted =
false;
8132 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8134 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8136 if(allocationsStarted ==
false)
8138 allocationsStarted =
true;
8139 json.WriteString(
"DefaultPools");
8143 json.BeginString(
"Type ");
8144 json.ContinueString(memTypeIndex);
8147 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8150 if(allocationsStarted)
8157 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8158 const size_t poolCount = m_Pools.size();
8161 json.WriteString(
"Pools");
8163 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8165 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8172 #endif // #if VMA_STATS_STRING_ENABLED 8174 static VkResult AllocateMemoryForImage(
8175 VmaAllocator allocator,
8178 VmaSuballocationType suballocType,
8179 VmaAllocation* pAllocation)
8181 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8183 VkMemoryRequirements vkMemReq = {};
8184 bool requiresDedicatedAllocation =
false;
8185 bool prefersDedicatedAllocation =
false;
8186 allocator->GetImageMemoryRequirements(image, vkMemReq,
8187 requiresDedicatedAllocation, prefersDedicatedAllocation);
8189 return allocator->AllocateMemory(
8191 requiresDedicatedAllocation,
8192 prefersDedicatedAllocation,
8195 *pAllocationCreateInfo,
8205 VmaAllocator* pAllocator)
8207 VMA_ASSERT(pCreateInfo && pAllocator);
8208 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8214 VmaAllocator allocator)
8216 if(allocator != VK_NULL_HANDLE)
8218 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8219 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8220 vma_delete(&allocationCallbacks, allocator);
8225 VmaAllocator allocator,
8226 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8228 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8229 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8233 VmaAllocator allocator,
8234 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8236 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8237 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8241 VmaAllocator allocator,
8242 uint32_t memoryTypeIndex,
8243 VkMemoryPropertyFlags* pFlags)
8245 VMA_ASSERT(allocator && pFlags);
8246 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8247 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8251 VmaAllocator allocator,
8252 uint32_t frameIndex)
8254 VMA_ASSERT(allocator);
8255 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8257 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8259 allocator->SetCurrentFrameIndex(frameIndex);
8263 VmaAllocator allocator,
8266 VMA_ASSERT(allocator && pStats);
8267 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8268 allocator->CalculateStats(pStats);
8271 #if VMA_STATS_STRING_ENABLED 8274 VmaAllocator allocator,
8275 char** ppStatsString,
8276 VkBool32 detailedMap)
8278 VMA_ASSERT(allocator && ppStatsString);
8279 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8281 VmaStringBuilder sb(allocator);
8283 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8287 allocator->CalculateStats(&stats);
8289 json.WriteString(
"Total");
8290 VmaPrintStatInfo(json, stats.
total);
8292 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8294 json.BeginString(
"Heap ");
8295 json.ContinueString(heapIndex);
8299 json.WriteString(
"Size");
8300 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8302 json.WriteString(
"Flags");
8303 json.BeginArray(
true);
8304 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8306 json.WriteString(
"DEVICE_LOCAL");
8312 json.WriteString(
"Stats");
8313 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8316 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8318 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8320 json.BeginString(
"Type ");
8321 json.ContinueString(typeIndex);
8326 json.WriteString(
"Flags");
8327 json.BeginArray(
true);
8328 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8329 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8331 json.WriteString(
"DEVICE_LOCAL");
8333 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8335 json.WriteString(
"HOST_VISIBLE");
8337 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8339 json.WriteString(
"HOST_COHERENT");
8341 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8343 json.WriteString(
"HOST_CACHED");
8345 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8347 json.WriteString(
"LAZILY_ALLOCATED");
8353 json.WriteString(
"Stats");
8354 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8363 if(detailedMap == VK_TRUE)
8365 allocator->PrintDetailedMap(json);
8371 const size_t len = sb.GetLength();
8372 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8375 memcpy(pChars, sb.GetData(), len);
8378 *ppStatsString = pChars;
8382 VmaAllocator allocator,
8385 if(pStatsString != VMA_NULL)
8387 VMA_ASSERT(allocator);
8388 size_t len = strlen(pStatsString);
8389 vma_delete_array(allocator, pStatsString, len + 1);
8393 #endif // #if VMA_STATS_STRING_ENABLED 8399 VmaAllocator allocator,
8400 uint32_t memoryTypeBits,
8402 uint32_t* pMemoryTypeIndex)
8404 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8405 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8406 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8413 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8417 switch(pAllocationCreateInfo->
usage)
8422 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8425 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8428 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8429 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8432 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8433 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8439 *pMemoryTypeIndex = UINT32_MAX;
8440 uint32_t minCost = UINT32_MAX;
8441 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8442 memTypeIndex < allocator->GetMemoryTypeCount();
8443 ++memTypeIndex, memTypeBit <<= 1)
8446 if((memTypeBit & memoryTypeBits) != 0)
8448 const VkMemoryPropertyFlags currFlags =
8449 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8451 if((requiredFlags & ~currFlags) == 0)
8454 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8456 if(currCost < minCost)
8458 *pMemoryTypeIndex = memTypeIndex;
8468 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8472 VmaAllocator allocator,
8473 const VkBufferCreateInfo* pBufferCreateInfo,
8475 uint32_t* pMemoryTypeIndex)
8477 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8478 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8479 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8480 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8482 const VkDevice hDev = allocator->m_hDevice;
8483 VkBuffer hBuffer = VK_NULL_HANDLE;
8484 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8485 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8486 if(res == VK_SUCCESS)
8488 VkMemoryRequirements memReq = {};
8489 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8490 hDev, hBuffer, &memReq);
8494 memReq.memoryTypeBits,
8495 pAllocationCreateInfo,
8498 allocator->GetVulkanFunctions().vkDestroyBuffer(
8499 hDev, hBuffer, allocator->GetAllocationCallbacks());
8505 VmaAllocator allocator,
8506 const VkImageCreateInfo* pImageCreateInfo,
8508 uint32_t* pMemoryTypeIndex)
8510 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8511 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8512 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8513 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8515 const VkDevice hDev = allocator->m_hDevice;
8516 VkImage hImage = VK_NULL_HANDLE;
8517 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8518 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8519 if(res == VK_SUCCESS)
8521 VkMemoryRequirements memReq = {};
8522 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8523 hDev, hImage, &memReq);
8527 memReq.memoryTypeBits,
8528 pAllocationCreateInfo,
8531 allocator->GetVulkanFunctions().vkDestroyImage(
8532 hDev, hImage, allocator->GetAllocationCallbacks());
8538 VmaAllocator allocator,
8542 VMA_ASSERT(allocator && pCreateInfo && pPool);
8544 VMA_DEBUG_LOG(
"vmaCreatePool");
8546 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8548 return allocator->CreatePool(pCreateInfo, pPool);
8552 VmaAllocator allocator,
8555 VMA_ASSERT(allocator);
8557 if(pool == VK_NULL_HANDLE)
8562 VMA_DEBUG_LOG(
"vmaDestroyPool");
8564 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8566 allocator->DestroyPool(pool);
8570 VmaAllocator allocator,
8574 VMA_ASSERT(allocator && pool && pPoolStats);
8576 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8578 allocator->GetPoolStats(pool, pPoolStats);
8582 VmaAllocator allocator,
8584 size_t* pLostAllocationCount)
8586 VMA_ASSERT(allocator && pool);
8588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8590 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8594 VmaAllocator allocator,
8595 const VkMemoryRequirements* pVkMemoryRequirements,
8597 VmaAllocation* pAllocation,
8600 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8602 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8604 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8606 VkResult result = allocator->AllocateMemory(
8607 *pVkMemoryRequirements,
8613 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8616 if(pAllocationInfo && result == VK_SUCCESS)
8618 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8625 VmaAllocator allocator,
8628 VmaAllocation* pAllocation,
8631 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8633 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8635 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8637 VkMemoryRequirements vkMemReq = {};
8638 bool requiresDedicatedAllocation =
false;
8639 bool prefersDedicatedAllocation =
false;
8640 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8641 requiresDedicatedAllocation,
8642 prefersDedicatedAllocation);
8644 VkResult result = allocator->AllocateMemory(
8646 requiresDedicatedAllocation,
8647 prefersDedicatedAllocation,
8651 VMA_SUBALLOCATION_TYPE_BUFFER,
8654 if(pAllocationInfo && result == VK_SUCCESS)
8656 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8663 VmaAllocator allocator,
8666 VmaAllocation* pAllocation,
8669 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8671 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8673 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8675 VkResult result = AllocateMemoryForImage(
8679 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8682 if(pAllocationInfo && result == VK_SUCCESS)
8684 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8691 VmaAllocator allocator,
8692 VmaAllocation allocation)
8694 VMA_ASSERT(allocator && allocation);
8696 VMA_DEBUG_LOG(
"vmaFreeMemory");
8698 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8700 allocator->FreeMemory(allocation);
8704 VmaAllocator allocator,
8705 VmaAllocation allocation,
8708 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8710 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8712 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8716 VmaAllocator allocator,
8717 VmaAllocation allocation)
8719 VMA_ASSERT(allocator && allocation);
8721 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8723 return allocator->TouchAllocation(allocation);
8727 VmaAllocator allocator,
8728 VmaAllocation allocation,
8731 VMA_ASSERT(allocator && allocation);
8733 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8735 allocation->SetUserData(allocator, pUserData);
8739 VmaAllocator allocator,
8740 VmaAllocation* pAllocation)
8742 VMA_ASSERT(allocator && pAllocation);
8744 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8746 allocator->CreateLostAllocation(pAllocation);
8750 VmaAllocator allocator,
8751 VmaAllocation allocation,
8754 VMA_ASSERT(allocator && allocation && ppData);
8756 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8758 return allocator->Map(allocation, ppData);
8762 VmaAllocator allocator,
8763 VmaAllocation allocation)
8765 VMA_ASSERT(allocator && allocation);
8767 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8769 allocator->Unmap(allocation);
8773 VmaAllocator allocator,
8774 VmaAllocation* pAllocations,
8775 size_t allocationCount,
8776 VkBool32* pAllocationsChanged,
8780 VMA_ASSERT(allocator && pAllocations);
8782 VMA_DEBUG_LOG(
"vmaDefragment");
8784 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8786 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8790 VmaAllocator allocator,
8791 const VkBufferCreateInfo* pBufferCreateInfo,
8794 VmaAllocation* pAllocation,
8797 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8799 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8801 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8803 *pBuffer = VK_NULL_HANDLE;
8804 *pAllocation = VK_NULL_HANDLE;
8807 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8808 allocator->m_hDevice,
8810 allocator->GetAllocationCallbacks(),
8815 VkMemoryRequirements vkMemReq = {};
8816 bool requiresDedicatedAllocation =
false;
8817 bool prefersDedicatedAllocation =
false;
8818 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8819 requiresDedicatedAllocation, prefersDedicatedAllocation);
8823 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8825 VMA_ASSERT(vkMemReq.alignment %
8826 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8828 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8830 VMA_ASSERT(vkMemReq.alignment %
8831 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8833 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8835 VMA_ASSERT(vkMemReq.alignment %
8836 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8840 res = allocator->AllocateMemory(
8842 requiresDedicatedAllocation,
8843 prefersDedicatedAllocation,
8846 *pAllocationCreateInfo,
8847 VMA_SUBALLOCATION_TYPE_BUFFER,
8852 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8853 allocator->m_hDevice,
8855 (*pAllocation)->GetMemory(),
8856 (*pAllocation)->GetOffset());
8860 if(pAllocationInfo != VMA_NULL)
8862 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8866 allocator->FreeMemory(*pAllocation);
8867 *pAllocation = VK_NULL_HANDLE;
8868 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8869 *pBuffer = VK_NULL_HANDLE;
8872 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8873 *pBuffer = VK_NULL_HANDLE;
8880 VmaAllocator allocator,
8882 VmaAllocation allocation)
8884 if(buffer != VK_NULL_HANDLE)
8886 VMA_ASSERT(allocator);
8888 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8890 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8892 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8894 allocator->FreeMemory(allocation);
8899 VmaAllocator allocator,
8900 const VkImageCreateInfo* pImageCreateInfo,
8903 VmaAllocation* pAllocation,
8906 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8908 VMA_DEBUG_LOG(
"vmaCreateImage");
8910 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8912 *pImage = VK_NULL_HANDLE;
8913 *pAllocation = VK_NULL_HANDLE;
8916 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8917 allocator->m_hDevice,
8919 allocator->GetAllocationCallbacks(),
8923 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8924 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8925 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8928 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8932 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8933 allocator->m_hDevice,
8935 (*pAllocation)->GetMemory(),
8936 (*pAllocation)->GetOffset());
8940 if(pAllocationInfo != VMA_NULL)
8942 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8946 allocator->FreeMemory(*pAllocation);
8947 *pAllocation = VK_NULL_HANDLE;
8948 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8949 *pImage = VK_NULL_HANDLE;
8952 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8953 *pImage = VK_NULL_HANDLE;
8960 VmaAllocator allocator,
8962 VmaAllocation allocation)
8964 if(image != VK_NULL_HANDLE)
8966 VMA_ASSERT(allocator);
8968 VMA_DEBUG_LOG(
"vmaDestroyImage");
8970 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8972 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8974 allocator->FreeMemory(allocation);
8978 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:943
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1197
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:968
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:953
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1154
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:947
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1503
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:965
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1678
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1373
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1427
Definition: vk_mem_alloc.h:1234
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:936
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1272
Definition: vk_mem_alloc.h:1181
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:977
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1030
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:962
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1185
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1095
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:950
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1094
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:958
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1682
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:994
VmaStatInfo total
Definition: vk_mem_alloc.h:1104
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1690
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1256
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1673
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:951
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:878
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:971
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1381
Definition: vk_mem_alloc.h:1375
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1513
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:948
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1293
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1397
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1433
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:934
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1384
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1132
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1668
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1686
Definition: vk_mem_alloc.h:1171
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1280
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:949
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1100
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:884
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:905
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:910
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1688
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1267
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1443
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:944
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1083
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1392
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:897
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1241
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1096
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:901
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1387
Definition: vk_mem_alloc.h:1180
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1262
Definition: vk_mem_alloc.h:1253
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1086
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:946
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1405
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:980
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1436
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1251
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1286
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1018
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1102
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1221
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1095
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:955
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:899
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:954
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1419
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1527
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:974
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1095
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1092
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1424
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1508
Definition: vk_mem_alloc.h:1249
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1684
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:942
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:957
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1090
Definition: vk_mem_alloc.h:1137
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1377
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1088
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:952
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:956
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1208
Definition: vk_mem_alloc.h:1164
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1522
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:932
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:945
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1489
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1355
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1096
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
TODO finish documentation...
Definition: vk_mem_alloc.h:1247
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1103
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1430
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1096
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1494