#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#include <vulkan/vulkan.h>

VK_DEFINE_HANDLE(VmaAllocator)
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size);
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo* pCreateInfo, VmaAllocator* pAllocator);
void vmaDestroyAllocator(VmaAllocator allocator);

void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags* pFlags);
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex);
void vmaCalculateStats(VmaAllocator allocator, VmaStats* pStats);

#define VMA_STATS_STRING_ENABLED 1

#if VMA_STATS_STRING_ENABLED
void vmaBuildStatsString(VmaAllocator allocator, char** ppStatsString, VkBool32 detailedMap);
void vmaFreeStatsString(VmaAllocator allocator, char* pStatsString);
#endif // #if VMA_STATS_STRING_ENABLED

VK_DEFINE_HANDLE(VmaPool)
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo, uint32_t* pMemoryTypeIndex);
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo, uint32_t* pMemoryTypeIndex);
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo, uint32_t* pMemoryTypeIndex);

VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool);
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats* pPoolStats);
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t* pLostAllocationCount);
VK_DEFINE_HANDLE(VmaAllocation)

VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo, VmaAllocation* pAllocation, VmaAllocationInfo* pAllocationInfo);
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo, VmaAllocation* pAllocation, VmaAllocationInfo* pAllocationInfo);
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo, VmaAllocation* pAllocation, VmaAllocationInfo* pAllocationInfo);
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation);
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo* pAllocationInfo);
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation);
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void* pUserData);
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation* pAllocation);
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void** ppData);
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation);

VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation* pAllocations, size_t allocationCount,
    VkBool32* pAllocationsChanged, const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo, VkBuffer* pBuffer,
    VmaAllocation* pAllocation, VmaAllocationInfo* pAllocationInfo);
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation);
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo, VkImage* pImage,
    VmaAllocation* pAllocation, VmaAllocationInfo* pAllocationInfo);
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation);
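/*
Editor's sketch (not part of the original header): a typical use of the buffer
helpers declared above. The struct fields and VMA_MEMORY_USAGE_GPU_ONLY are the
documented VMA names; the surrounding device/allocator setup is assumed to exist.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    if(vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr) == VK_SUCCESS)
    {
        // ... use buf ...
        vmaDestroyBuffer(allocator, buf, alloc);
    }
*/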
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#ifdef __INTELLISENSE__
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if VMA_USE_STL_CONTAINERS
#define VMA_USE_STL_VECTOR 1
#define VMA_USE_STL_UNORDERED_MAP 1
#define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
#include <vector>
#endif
#if VMA_USE_STL_UNORDERED_MAP
#include <unordered_map>
#endif
#if VMA_USE_STL_LIST
#include <list>
#endif

#include <cstdint>
#include <cstdlib>
#include <cstring>
#include <cassert>
#include <algorithm>
#include <atomic>
#include <mutex>

#if !defined(_WIN32) && !defined(__APPLE__)
#include <malloc.h> // for aligned_alloc()
#endif

#ifndef VMA_NULL
#define VMA_NULL nullptr
#endif

#if defined(__APPLE__) || defined(__ANDROID__)
// aligned_alloc() is not available on these platforms, so emulate it with posix_memalign().
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be at least sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
#ifdef _DEBUG
#define VMA_ASSERT(expr) assert(expr)
#else
#define VMA_ASSERT(expr)
#endif
#endif

// Assert used inside hot code paths; keeping it empty avoids slowing the program down.
#ifndef VMA_HEAVY_ASSERT
#ifdef _DEBUG
#define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
#else
#define VMA_HEAVY_ASSERT(expr)
#endif
#endif

#ifndef VMA_ALIGN_OF
#define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
#if defined(_WIN32)
#define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
#else
#define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
#endif
#endif

#ifndef VMA_SYSTEM_FREE
#if defined(_WIN32)
#define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
#else
#define VMA_SYSTEM_FREE(ptr) free(ptr)
#endif
#endif

#ifndef VMA_MIN
#define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif
#ifndef VMA_MAX
#define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif
#ifndef VMA_SWAP
#define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif
#ifndef VMA_SORT
#define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
#define VMA_DEBUG_LOG(format, ...)
#endif

#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED

#ifndef VMA_MUTEX
class VmaMutex
{
public:
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
#define VMA_MUTEX VmaMutex
#endif

#ifndef VMA_ATOMIC_UINT32
#define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
// 1 = use best-fit strategy when searching for a free suballocation; 0 = first-fit.
#define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
// Every allocation gets its own VkDeviceMemory block. For debugging purposes only.
#define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
// Minimum alignment of all suballocations, in bytes. For debugging purposes only.
#define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
// Minimum margin between suballocations, in bytes. For debugging purposes only.
#define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
// Set to 1 to serialize all entry calls to the library with a single global mutex.
#define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
// Minimum value of VkPhysicalDeviceLimits::bufferImageGranularity to assume.
#define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
#define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
#define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
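// Editor's sketch (not part of the original file): VmaCountBitsSet is a standard
// SWAR popcount, so for example 0xB1 (binary 10110001) yields 4:
//
//     VMA_ASSERT(VmaCountBitsSet(0xB1u) == 4);
//     VMA_ASSERT(VmaCountBitsSet(0u) == 0);
//     VMA_ASSERT(VmaCountBitsSet(UINT32_MAX) == 32);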
// Aligns given value up to the nearest multiple of align.
// For example: VmaAlignUp(11, 8) = 16. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
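// Editor's sketch (not part of the original file): the VMA_SORT define right below
// maps onto this hand-rolled quicksort. It works on any random-access range with a
// strict-weak-ordering comparator, e.g.:
//
//     uint32_t values[] = { 3, 1, 2 };
//     VmaQuickSort(values, values + 3, [](uint32_t a, uint32_t b) { return a < b; });
//     // values is now { 1, 2, 3 }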
#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy the same VkDeviceMemory page.
ResourceA must be at a lower memory offset than ResourceB.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
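// Editor's sketch (not part of the original file): with a 64 KiB page
// (pageSize = 0x10000), a resource at offset 0 with size 0x9000 ends on page 0 and a
// resource starting at offset 0x9000 also lies on page 0, so
// VmaBlocksOnSamePage(0, 0x9000, 0x9000, 0x10000) returns true; a resource starting
// at 0x10000 is on the next page, so the same call with offset 0x10000 returns false.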
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if the two suballocation types could conflict and must respect
// VkPhysicalDeviceLimits::bufferImageGranularity (a "linear" resource next to an "optimal" image).
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
// RAII helper that locks the given mutex in its constructor and unlocks it in the destructor.
struct VmaMutexLock
{
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex) { m_pMutex->Lock(); }
    }
    ~VmaMutexLock()
    {
        if(m_pMutex) { m_pMutex->Unlock(); }
    }
private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Performs binary search and returns iterator to the first element that is not less
// than (i.e. greater or equal to) key, according to comparison cmp.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
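// Editor's sketch (not part of the original file): the helper behaves like
// std::lower_bound. For a sorted array { 1, 3, 5, 7 } and key 4:
//
//     const int arr[] = { 1, 3, 5, 7 };
//     const int* it = VmaBinaryFindFirstNotLess(arr, arr + 4, 4, [](int a, int b) { return a < b; });
//     // it points at 5, the first element not less than 4.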
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}
#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
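// Editor's sketch (not part of the original file): the macros above combine the
// user-supplied VkAllocationCallbacks with placement new, so library objects are
// constructed in memory obtained through the callbacks and destroyed explicitly.
// MyStruct and its constructor argument are placeholders for illustration:
//
//     MyStruct* p = vma_new(pAllocationCallbacks, MyStruct)(42);
//     vma_delete(pAllocationCallbacks, p);   // calls ~MyStruct() and VmaFree()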
// STL-compatible allocator that forwards to VkAllocationCallbacks; used with VmaVector, VmaList etc.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

// Class with interface compatible with a subset of std::vector.
// T must be POD because its contents are moved with memcpy, not a copy constructor.
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }
    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }
    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}
#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if(it != vector.end() && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.begin();
    }
    return vector.size();
}
/*
Allocator for objects of type T using a list of fixed-size arrays (blocks), so the
number of objects that can be allocated is not bounded up front.
*/
template<typename T>
class VmaPoolAllocator
{
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new block and use its first item.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Cast to the union type through a memcpy.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Set up the singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
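// Editor's sketch (not part of the original file): each ItemBlock is an array of
// m_ItemsPerBlock items whose free slots form an intrusive singly-linked list threaded
// through Item::NextFreeIndex, so Alloc() pops the head in O(1) and Free() pushes the
// slot back. A minimal use, with Foo as a placeholder POD type:
//
//     struct Foo { int x; };
//     VmaPoolAllocator<Foo> pool(VMA_NULL, 128);  // 128 items per block
//     Foo* a = pool.Alloc();                      // returns raw, unconstructed storage
//     pool.Free(a);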
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;

    // Declared but not defined, to block the copy constructor and assignment operator.
    VmaRawList(const VmaRawList<T>& src);
    VmaRawList<T>& operator=(const VmaRawList<T>& rhs);
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally does not call Clear(): the item allocator releases all its blocks anyway.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
public:
    class iterator
    {
    public:
        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };
    class const_iterator
    {
    public:
        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };
    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};
#endif // #if VMA_USE_STL_LIST

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
// Class compatible with a subset of the std::unordered_map interface.
// KeyT and ValueT must be POD because they are stored in a VmaVector.
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};
#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    return m_Vector.end();
}
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}
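// Editor's sketch (not part of the original file): VmaMap keeps its pairs sorted by
// key inside a VmaVector and uses binary search, so it behaves like a small flat map:
//
//     VmaStlAllocator< VmaPair<uint32_t, uint32_t> > alloc(VMA_NULL);
//     VmaMap<uint32_t, uint32_t> map(alloc);
//     map.insert(VmaPair<uint32_t, uint32_t>(5, 500));
//     VmaPair<uint32_t, uint32_t>* it = map.find(5);   // it->second == 500
//     if(it != map.end()) { map.erase(it); }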
#endif // #if VMA_USE_STL_UNORDERED_MAP

class VmaDeviceMemoryBlock;

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment(1),
        m_Size(0),
        m_pUserData(VMA_NULL),
        m_LastUseFrameIndex(currentFrameIndex),
        m_Type((uint8_t)ALLOCATION_TYPE_NONE),
        m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
        m_MapCount(0),
        m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
    {
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if the owned user-data string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaPool hPool,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_hPool = hPool;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    // pMappedData not null means the allocation is persistently mapped.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;
    VmaPool GetPool() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    // Attempts to mark the allocation as lost. Returns true on success.
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of a VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaPool m_hPool; // Null if the allocation belongs to general memory.
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means the memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

    void FreeUserDataString(VmaAllocator hAllocator);
};
// Represents a region of a VmaDeviceMemoryBlock: either a single allocation or a free range.
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation made lost, expressed in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a VmaDeviceMemoryBlock.
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with the proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with the proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
// Bookkeeping of allocations and unused (free) ranges inside a single VkDeviceMemory block.
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    ~VmaBlockMetadata();
    void Init(VkDeviceSize size);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;
    VkDeviceSize GetSize() const { return m_Size; }
    size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    VkDeviceSize GetUnusedRangeSizeMax() const;
    // Returns true if this block is empty - contains only a single free suballocation.
    bool IsEmpty() const;

    void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    // Creates a trivial request for the case when the block is empty.
    void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);

    // Tries to find a place for a suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true; otherwise returns false.
    bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        VmaAllocationRequest* pAllocationRequest);

    bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    // Makes the actual allocation based on request. Request must correspond to a valid free suballocation.
    void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    // Frees the suballocation assigned to the given memory region.
    void Free(const VmaAllocation allocation);
    void FreeAtOffset(VkDeviceSize offset);

private:
    VkDeviceSize m_Size;
    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Free suballocations larger than a certain threshold, sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if the requested suballocation can be placed at suballocItem.
    // If yes, fills pOffset and returns true; otherwise returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given a free suballocation, merges it with the following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases the given suballocation, making it free, and merges it with adjacent free ones.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Inserts the given free suballocation into m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Removes the given free suballocation from m_FreeSuballocationsBySize if it is there.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
// Keeps track of the mapped pointer and the mapping reference count of a VkDeviceMemory block.
class VmaDeviceMemoryMapping
{
public:
    VmaDeviceMemoryMapping();
    ~VmaDeviceMemoryMapping();

    void* GetMappedData() const { return m_pMappedData; }

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData);
    void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);

private:
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
// Represents a single block of device memory (VkDeviceMemory) together with the
// metadata about its allocated and unused regions.
class VmaDeviceMemoryBlock
{
public:
    uint32_t m_MemoryTypeIndex;
    VkDeviceMemory m_hMemory;
    VmaDeviceMemoryMapping m_Mapping;
    VmaBlockMetadata m_Metadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);
};

struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific Vulkan memory type.
struct VmaBlockVector
{
    VmaBlockVector(
        VmaAllocator hAllocator,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }

    bool IsEmpty() const { return m_Blocks.empty(); }

    VkResult Allocate(
        VmaPool hCurrentPool,
        uint32_t currentFrameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    void Free(VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);

    VmaDefragmentator* EnsureDefragmentator(
        VmaAllocator hAllocator,
        uint32_t currentFrameIndex);

    VkResult Defragment(
        VmaDefragmentationStats* pDefragmentationStats,
        VkDeviceSize& maxBytesToMove,
        uint32_t& maxAllocationsToMove);

    void DestroyDefragmentator();

private:
    friend class VmaDefragmentator;

    const VmaAllocator m_hAllocator;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    VMA_MUTEX m_Mutex;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    // There can be at most one block that is completely empty.
    bool m_HasEmptyBlock;
    VmaDefragmentator* m_pDefragmentator;

    size_t CalcMaxBlockSize() const;
    // Finds and removes the given block from the vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);
    // Performs a single step in sorting m_Blocks; they may not be fully sorted after this call.
    void IncrementallySortBlocks();
    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);
};
struct VmaPool_T
{
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo);
    ~VmaPool_T();

    VmaBlockVector& GetBlockVector() { return m_BlockVector; }

#if VMA_STATS_STRING_ENABLED
#endif
};

class VmaDefragmentator
{
private:
    const VmaAllocator m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    uint32_t m_CurrentFrameIndex;
    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
    };

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    // Used between AddAllocation and Defragment.
    VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

    struct BlockInfo
    {
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks),
            m_pMappedDataForDefragmentation(VMA_NULL)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescecnding()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        VkResult EnsureMapping(VmaAllocator hAllocator, void** ppMappedData);
        void Unmap(VmaAllocator hAllocator);

    private:
        // Not null if mapped for defragmentation only, not originally mapped.
        void* m_pMappedDataForDefragmentation;
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);

public:
    VmaDefragmentator(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex);

    ~VmaDefragmentator();

    VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);

    VkResult Defragment(
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);
};
// Main allocator object.
struct VmaAllocator_T
{
public:
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    VkDevice m_hDevice;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;

    // Number of bytes free out of the limit, or VK_WHOLE_SIZE if there is no limit for that heap.
    VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
    VMA_MUTEX m_HeapSizeLimitMutex;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default block vectors, one per memory type.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Main deallocation function.
    void FreeMemory(const VmaAllocation allocation);

    void CalculateStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult Defragment(
        VmaAllocation* pAllocations,
        size_t allocationCount,
        VkBool32* pAllocationsChanged,
        const VmaDefragmentationInfo* pDefragmentationInfo,
        VmaDefragmentationStats* pDefragmentationStats);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    bool TouchAllocation(VmaAllocation hAllocation);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;

    VMA_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;

    VmaVulkanFunctions m_VulkanFunctions;

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        const VkMemoryRequirements& vkMemReq,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // Allocates and registers a new VkDeviceMemory specifically for a single allocation.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaAllocation* pAllocation);

    void FreeDedicatedMemory(VmaAllocation allocation);
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};
void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    VmaUint32ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    VmaUint64ToStr(buf, sizeof(buf), num);
    Add(buf);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}
#endif // #if VMA_STATS_STRING_ENABLED

#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}
void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}
void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}
void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        // Copy plain printable characters; characters that would need JSON escaping
        // are not handled in this excerpt and only trigger the assert below.
        if(ch >= 32 && ch != '"' && ch != '\\')
        {
            m_SB.Add(ch);
        }
        else
        {
            VMA_ASSERT(0 && "Character not currently supported.");
        }
    }
}
void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}
void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}
void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}
void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move the mapping reference counter from the old block to the new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}
VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->m_hMemory;
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_MemoryTypeIndex;
    default:
        VMA_ASSERT(0);
        return UINT32_MAX;
    }
}
void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    // Atomically flip the last-use frame index to VMA_FRAME_INDEX_LOST, but only
    // if the allocation has not been used within the last frameInUseCount frames.
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use was early enough - try to mark the allocation as lost.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                return true;
            }
        }
    }
}

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}
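/*
Illustrative usage sketch (not part of the library): the frame-index arithmetic
in MakeLost() is driven from application code roughly like this. `allocator`,
`allocation` and `frameIndex` are assumed to exist; frameInUseCount comes from
pool or allocator creation.

    // Once per frame:
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    // Before using an allocation created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT:
    VmaAllocationInfo allocInfo;
    vmaGetAllocationInfo(allocator, allocation, &allocInfo);
    if(allocInfo.deviceMemory == VK_NULL_HANDLE)
    {
        // The allocation was made lost (last use older than frameInUseCount
        // frames) - the resource has to be re-created.
    }
*/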
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED

// Comparator for m_FreeSuballocationsBySize: orders free suballocations by size
// and also supports lookup by a plain VkDeviceSize key.
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
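/*
Illustrative sketch (not part of the library): the two operator() overloads
above let one comparator serve both the sorting of m_FreeSuballocationsBySize
and the size-keyed, lower-bound style lookup done by VmaBinaryFindFirstNotLess.
A minimal stand-alone analogue using the standard library:

    #include <algorithm>
    #include <vector>

    struct Range { VkDeviceSize size; };
    struct RangeSizeLess
    {
        bool operator()(const Range* lhs, const Range* rhs) const { return lhs->size < rhs->size; }
        bool operator()(const Range* lhs, VkDeviceSize rhsSize) const { return lhs->size < rhsSize; }
    };

    // freeBySize is kept sorted by size ascending; find the first range that
    // can hold allocSize (the best-fit candidate), as the allocator does below.
    std::vector<Range*> freeBySize;
    VkDeviceSize allocSize = 256;
    std::vector<Range*>::iterator it =
        std::lower_bound(freeBySize.begin(), freeBySize.end(), allocSize, RangeSizeLess());
*/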
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata::~VmaBlockMetadata()
{
}

void VmaBlockMetadata::Init(VkDeviceSize size)
{
    m_Size = size;
    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
5013 bool VmaBlockMetadata::Validate()
const 5015 if(m_Suballocations.empty())
5021 VkDeviceSize calculatedOffset = 0;
5023 uint32_t calculatedFreeCount = 0;
5025 VkDeviceSize calculatedSumFreeSize = 0;
5028 size_t freeSuballocationsToRegister = 0;
    bool prevFree = false;
5032 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5033 suballocItem != m_Suballocations.cend();
5036 const VmaSuballocation& subAlloc = *suballocItem;
5039 if(subAlloc.offset != calculatedOffset)
5044 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5046 if(prevFree && currFree)
5051 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5058 calculatedSumFreeSize += subAlloc.size;
5059 ++calculatedFreeCount;
5060 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5062 ++freeSuballocationsToRegister;
5067 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5071 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5077 calculatedOffset += subAlloc.size;
5078 prevFree = currFree;
5083 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5088 VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5091 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5094 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5099 if(suballocItem->size < lastSize)
5104 lastSize = suballocItem->size;
5108 if(!ValidateFreeSuballocationList() ||
5109 (calculatedOffset != m_Size) ||
5110 (calculatedSumFreeSize != m_SumFreeSize) ||
5111 (calculatedFreeCount != m_FreeCount))
5119 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5121 if(!m_FreeSuballocationsBySize.empty())
5123 return m_FreeSuballocationsBySize.back()->size;
5131 bool VmaBlockMetadata::IsEmpty()
const 5133 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
5136 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5140 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5152 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5153 suballocItem != m_Suballocations.cend();
5156 const VmaSuballocation& suballoc = *suballocItem;
5157 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
5170 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5172 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    inoutStats.size += m_Size;
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(m_Size);

    json.WriteString("UnusedBytes");
    json.WriteNumber(m_SumFreeSize);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(m_FreeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        json.BeginObject(true);

        json.WriteString("Type");
        json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);

        json.WriteString("Size");
        json.WriteNumber(suballocItem->size);

        json.WriteString("Offset");
        json.WriteNumber(suballocItem->offset);

        if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            const void* pUserData = suballocItem->hAllocation->GetUserData();
            if(pUserData != VMA_NULL)
            {
                json.WriteString("UserData");
                if(suballocItem->hAllocation->IsUserDataString())
                {
                    json.WriteString((const char*)pUserData);
                }
                else
                {
                    json.BeginString();
                    json.ContinueString_Pointer(pUserData);
                    json.EndString();
                }
            }
        }

        json.EndObject();
    }
    json.EndArray();

    json.EndObject();
}
#endif // #if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(IsEmpty());
    pAllocationRequest->offset = 0;
    pAllocationRequest->sumFreeSize = m_SumFreeSize;
    pAllocationRequest->sumItemSize = 0;
    pAllocationRequest->item = m_Suballocations.begin();
    pAllocationRequest->itemsToMakeLostCount = 0;
}
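/*
Illustrative sketch (not part of the library): relationship between the request
functions here and Alloc(). A caller (see VmaBlockVector::Allocate further
down) first asks the metadata for a request, optionally makes other
allocations lost, then commits it. `metadata`, `hAllocation` and the scalar
parameters are assumed to exist.

    VmaAllocationRequest request = {};
    if(metadata.CreateAllocationRequest(
        currentFrameIndex, frameInUseCount, bufferImageGranularity,
        allocSize, allocAlignment, suballocType,
        false, // canMakeOtherLost
        &request))
    {
        metadata.Alloc(request, suballocType, allocSize, hAllocation);
    }
*/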
5265 bool VmaBlockMetadata::CreateAllocationRequest(
5266 uint32_t currentFrameIndex,
5267 uint32_t frameInUseCount,
5268 VkDeviceSize bufferImageGranularity,
5269 VkDeviceSize allocSize,
5270 VkDeviceSize allocAlignment,
5271 VmaSuballocationType allocType,
5272 bool canMakeOtherLost,
5273 VmaAllocationRequest* pAllocationRequest)
5275 VMA_ASSERT(allocSize > 0);
5276 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5277 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5278 VMA_HEAVY_ASSERT(Validate());
    if(canMakeOtherLost == false && m_SumFreeSize < allocSize)
5287 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5288 if(freeSuballocCount > 0)
5293 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5294 m_FreeSuballocationsBySize.data(),
5295 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5297 VmaSuballocationItemSizeLess());
5298 size_t index = it - m_FreeSuballocationsBySize.data();
5299 for(; index < freeSuballocCount; ++index)
5304 bufferImageGranularity,
5308 m_FreeSuballocationsBySize[index],
5310 &pAllocationRequest->offset,
5311 &pAllocationRequest->itemsToMakeLostCount,
5312 &pAllocationRequest->sumFreeSize,
5313 &pAllocationRequest->sumItemSize))
5315 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
            for(size_t index = freeSuballocCount; index--; )
5328 bufferImageGranularity,
5332 m_FreeSuballocationsBySize[index],
5334 &pAllocationRequest->offset,
5335 &pAllocationRequest->itemsToMakeLostCount,
5336 &pAllocationRequest->sumFreeSize,
5337 &pAllocationRequest->sumItemSize))
5339 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5346 if(canMakeOtherLost)
5350 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5351 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5353 VmaAllocationRequest tmpAllocRequest = {};
5354 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5355 suballocIt != m_Suballocations.end();
5358 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5359 suballocIt->hAllocation->CanBecomeLost())
5364 bufferImageGranularity,
5370 &tmpAllocRequest.offset,
5371 &tmpAllocRequest.itemsToMakeLostCount,
5372 &tmpAllocRequest.sumFreeSize,
5373 &tmpAllocRequest.sumItemSize))
5375 tmpAllocRequest.item = suballocIt;
5377 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5379 *pAllocationRequest = tmpAllocRequest;
5385 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
5394 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5395 uint32_t currentFrameIndex,
5396 uint32_t frameInUseCount,
5397 VmaAllocationRequest* pAllocationRequest)
5399 while(pAllocationRequest->itemsToMakeLostCount > 0)
5401 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5403 ++pAllocationRequest->item;
5405 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5406 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5407 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5408 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5410 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5411 --pAllocationRequest->itemsToMakeLostCount;
5419 VMA_HEAVY_ASSERT(Validate());
5420 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5421 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5426 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5428 uint32_t lostAllocationCount = 0;
5429 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5430 it != m_Suballocations.end();
5433 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5434 it->hAllocation->CanBecomeLost() &&
5435 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5437 it = FreeSuballocation(it);
5438 ++lostAllocationCount;
5441 return lostAllocationCount;
5444 void VmaBlockMetadata::Alloc(
5445 const VmaAllocationRequest& request,
5446 VmaSuballocationType type,
5447 VkDeviceSize allocSize,
5448 VmaAllocation hAllocation)
5450 VMA_ASSERT(request.item != m_Suballocations.end());
5451 VmaSuballocation& suballoc = *request.item;
5453 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5455 VMA_ASSERT(request.offset >= suballoc.offset);
5456 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5457 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5458 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
5462 UnregisterFreeSuballocation(request.item);
5464 suballoc.offset = request.offset;
5465 suballoc.size = allocSize;
5466 suballoc.type = type;
5467 suballoc.hAllocation = hAllocation;
5472 VmaSuballocation paddingSuballoc = {};
5473 paddingSuballoc.offset = request.offset + allocSize;
5474 paddingSuballoc.size = paddingEnd;
5475 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5476 VmaSuballocationList::iterator next = request.item;
5478 const VmaSuballocationList::iterator paddingEndItem =
5479 m_Suballocations.insert(next, paddingSuballoc);
5480 RegisterFreeSuballocation(paddingEndItem);
5486 VmaSuballocation paddingSuballoc = {};
5487 paddingSuballoc.offset = request.offset - paddingBegin;
5488 paddingSuballoc.size = paddingBegin;
5489 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5490 const VmaSuballocationList::iterator paddingBeginItem =
5491 m_Suballocations.insert(request.item, paddingSuballoc);
5492 RegisterFreeSuballocation(paddingBeginItem);
5496 m_FreeCount = m_FreeCount - 1;
5497 if(paddingBegin > 0)
5505 m_SumFreeSize -= allocSize;
void VmaBlockMetadata::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(0);
            return false;
        }
        if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
        {
            VMA_ASSERT(0);
            return false;
        }
        if(it->size < lastSize)
        {
            VMA_ASSERT(0);
            return false;
        }

        lastSize = it->size;
    }
    return true;
}
5569 bool VmaBlockMetadata::CheckAllocation(
5570 uint32_t currentFrameIndex,
5571 uint32_t frameInUseCount,
5572 VkDeviceSize bufferImageGranularity,
5573 VkDeviceSize allocSize,
5574 VkDeviceSize allocAlignment,
5575 VmaSuballocationType allocType,
5576 VmaSuballocationList::const_iterator suballocItem,
5577 bool canMakeOtherLost,
5578 VkDeviceSize* pOffset,
5579 size_t* itemsToMakeLostCount,
5580 VkDeviceSize* pSumFreeSize,
5581 VkDeviceSize* pSumItemSize)
const 5583 VMA_ASSERT(allocSize > 0);
5584 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5585 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5586 VMA_ASSERT(pOffset != VMA_NULL);
5588 *itemsToMakeLostCount = 0;
5592 if(canMakeOtherLost)
5594 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5596 *pSumFreeSize = suballocItem->size;
5600 if(suballocItem->hAllocation->CanBecomeLost() &&
5601 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5603 ++*itemsToMakeLostCount;
5604 *pSumItemSize = suballocItem->size;
5613 if(m_Size - suballocItem->offset < allocSize)
5619 *pOffset = suballocItem->offset;
5622 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5624 *pOffset += VMA_DEBUG_MARGIN;
5628 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5629 *pOffset = VmaAlignUp(*pOffset, alignment);
5633 if(bufferImageGranularity > 1)
        bool bufferImageGranularityConflict = false;
5636 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5637 while(prevSuballocItem != m_Suballocations.cbegin())
5640 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5641 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5643 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    bufferImageGranularityConflict = true;
5653 if(bufferImageGranularityConflict)
5655 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5661 if(*pOffset >= suballocItem->offset + suballocItem->size)
5667 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5670 VmaSuballocationList::const_iterator next = suballocItem;
5672 const VkDeviceSize requiredEndMargin =
5673 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5675 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5677 if(suballocItem->offset + totalSize > m_Size)
5684 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5685 if(totalSize > suballocItem->size)
5687 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5688 while(remainingSize > 0)
5691 if(lastSuballocItem == m_Suballocations.cend())
5695 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5697 *pSumFreeSize += lastSuballocItem->size;
5701 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5702 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5703 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5705 ++*itemsToMakeLostCount;
5706 *pSumItemSize += lastSuballocItem->size;
5713 remainingSize = (lastSuballocItem->size < remainingSize) ?
5714 remainingSize - lastSuballocItem->size : 0;
5720 if(bufferImageGranularity > 1)
5722 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5724 while(nextSuballocItem != m_Suballocations.cend())
5726 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5727 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5729 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5731 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5732 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5733 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5735 ++*itemsToMakeLostCount;
5754 const VmaSuballocation& suballoc = *suballocItem;
5755 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5757 *pSumFreeSize = suballoc.size;
5760 if(suballoc.size < allocSize)
5766 *pOffset = suballoc.offset;
5769 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5771 *pOffset += VMA_DEBUG_MARGIN;
5775 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5776 *pOffset = VmaAlignUp(*pOffset, alignment);
5780 if(bufferImageGranularity > 1)
        bool bufferImageGranularityConflict = false;
5783 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5784 while(prevSuballocItem != m_Suballocations.cbegin())
5787 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5788 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5790 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    bufferImageGranularityConflict = true;
5800 if(bufferImageGranularityConflict)
5802 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5807 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5810 VmaSuballocationList::const_iterator next = suballocItem;
5812 const VkDeviceSize requiredEndMargin =
5813 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5816 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
5823 if(bufferImageGranularity > 1)
5825 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5827 while(nextSuballocItem != m_Suballocations.cend())
5829 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5830 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5832 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}

VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}

void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
{
    VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
}

VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count, void **ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}

void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_MemoryTypeIndex(UINT32_MAX),
    m_hMemory(VK_NULL_HANDLE),
    m_Metadata(hAllocator)
{
}

void VmaDeviceMemoryBlock::Init(
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_hMemory = newMemory;

    m_Metadata.Init(newSize);
}

void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_Metadata.IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;
}

bool VmaDeviceMemoryBlock::Validate() const
{
    if((m_hMemory == VK_NULL_HANDLE) ||
        (m_Metadata.GetSize() == 0))
    {
        return false;
    }

    return m_Metadata.Validate();
}

VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
}

void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    m_Mapping.Unmap(hAllocator, m_hMemory, count);
}
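/*
Illustrative usage sketch (not part of the library): VmaDeviceMemoryBlock
objects are created indirectly, for example when a custom pool allocates its
blocks. `allocator` and `memTypeIndex` are assumed to exist; the sizes are
arbitrary example values.

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex; // e.g. from vmaFindMemoryTypeIndex
    poolCreateInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB per VkDeviceMemory block
    poolCreateInfo.minBlockCount = 1;
    poolCreateInfo.maxBlockCount = 8;

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool);
*/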
    memset(&outInfo, 0, sizeof(outInfo));
static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
6136 VmaPool_T::VmaPool_T(
6137 VmaAllocator hAllocator,
6141 createInfo.memoryTypeIndex,
6142 createInfo.blockSize,
6143 createInfo.minBlockCount,
6144 createInfo.maxBlockCount,
6146 createInfo.frameInUseCount,
6151 VmaPool_T::~VmaPool_T()
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockVector::VmaBlockVector(
6160 VmaAllocator hAllocator,
6161 uint32_t memoryTypeIndex,
6162 VkDeviceSize preferredBlockSize,
6163 size_t minBlockCount,
6164 size_t maxBlockCount,
6165 VkDeviceSize bufferImageGranularity,
6166 uint32_t frameInUseCount,
6167 bool isCustomPool) :
6168 m_hAllocator(hAllocator),
6169 m_MemoryTypeIndex(memoryTypeIndex),
6170 m_PreferredBlockSize(preferredBlockSize),
6171 m_MinBlockCount(minBlockCount),
6172 m_MaxBlockCount(maxBlockCount),
6173 m_BufferImageGranularity(bufferImageGranularity),
6174 m_FrameInUseCount(frameInUseCount),
6175 m_IsCustomPool(isCustomPool),
6176 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6177 m_HasEmptyBlock(false),
6178 m_pDefragmentator(VMA_NULL)
6182 VmaBlockVector::~VmaBlockVector()
6184 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6186 for(
size_t i = m_Blocks.size(); i--; )
6188 m_Blocks[i]->Destroy(m_hAllocator);
6189 vma_delete(m_hAllocator, m_Blocks[i]);
6193 VkResult VmaBlockVector::CreateMinBlocks()
6195 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6197 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6198 if(res != VK_SUCCESS)
6206 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6214 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6216 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6218 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6220 VMA_HEAVY_ASSERT(pBlock->Validate());
6221 pBlock->m_Metadata.AddPoolStats(*pStats);
6225 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
6227 VkResult VmaBlockVector::Allocate(
6228 VmaPool hCurrentPool,
6229 uint32_t currentFrameIndex,
6230 const VkMemoryRequirements& vkMemReq,
6232 VmaSuballocationType suballocType,
6233 VmaAllocation* pAllocation)
6238 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6242 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6244 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6245 VMA_ASSERT(pCurrBlock);
6246 VmaAllocationRequest currRequest = {};
6247 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6250 m_BufferImageGranularity,
6258 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
6262 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6263 if(res != VK_SUCCESS)
6270 if(pCurrBlock->m_Metadata.IsEmpty())
                m_HasEmptyBlock = false;
6275 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6276 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6277 (*pAllocation)->InitBlockAllocation(
6286 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6287 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
            (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6293 const bool canCreateNewBlock =
6295 (m_Blocks.size() < m_MaxBlockCount);
6298 if(canCreateNewBlock)
6301 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6302 uint32_t newBlockSizeShift = 0;
6303 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
        if(m_IsCustomPool == false)
6310 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6311 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6313 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6314 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6316 newBlockSize = smallerNewBlockSize;
6317 ++newBlockSizeShift;
6326 size_t newBlockIndex = 0;
6327 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
        if(m_IsCustomPool == false)
6331 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6333 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6334 if(smallerNewBlockSize >= vkMemReq.size)
6336 newBlockSize = smallerNewBlockSize;
6337 ++newBlockSizeShift;
6338 res = CreateBlock(newBlockSize, &newBlockIndex);
6347 if(res == VK_SUCCESS)
6349 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6350 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6354 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6355 if(res != VK_SUCCESS)
6362 VmaAllocationRequest allocRequest;
6363 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6364 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6365 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6366 (*pAllocation)->InitBlockAllocation(
6369 allocRequest.offset,
6375 VMA_HEAVY_ASSERT(pBlock->Validate());
6376 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
            (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6385 if(canMakeOtherLost)
6387 uint32_t tryIndex = 0;
6388 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6390 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6391 VmaAllocationRequest bestRequest = {};
6392 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6396 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6398 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6399 VMA_ASSERT(pCurrBlock);
6400 VmaAllocationRequest currRequest = {};
6401 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6404 m_BufferImageGranularity,
6411 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6412 if(pBestRequestBlock == VMA_NULL ||
6413 currRequestCost < bestRequestCost)
6415 pBestRequestBlock = pCurrBlock;
6416 bestRequest = currRequest;
6417 bestRequestCost = currRequestCost;
6419 if(bestRequestCost == 0)
6427 if(pBestRequestBlock != VMA_NULL)
6431 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6432 if(res != VK_SUCCESS)
6438 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6444 if(pBestRequestBlock->m_Metadata.IsEmpty())
                    m_HasEmptyBlock = false;
6449 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6450 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6451 (*pAllocation)->InitBlockAllocation(
6460 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6461 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
                (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
6476 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6478 return VK_ERROR_TOO_MANY_OBJECTS;
6482 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6485 void VmaBlockVector::Free(
6486 VmaAllocation hAllocation)
6488 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6492 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6494 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6496 if(hAllocation->IsPersistentMap())
6498 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6501 pBlock->m_Metadata.Free(hAllocation);
6502 VMA_HEAVY_ASSERT(pBlock->Validate());
6504 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6507 if(pBlock->m_Metadata.IsEmpty())
6510 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6512 pBlockToDelete = pBlock;
                m_HasEmptyBlock = true;
6523 else if(m_HasEmptyBlock)
6525 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6526 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6528 pBlockToDelete = pLastBlock;
6529 m_Blocks.pop_back();
                m_HasEmptyBlock = false;
6534 IncrementallySortBlocks();
6539 if(pBlockToDelete != VMA_NULL)
6541 VMA_DEBUG_LOG(
" Deleted empty allocation");
6542 pBlockToDelete->Destroy(m_hAllocator);
6543 vma_delete(m_hAllocator, pBlockToDelete);
size_t VmaBlockVector::CalcMaxBlockSize() const
{
    size_t result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    // Bubble sort only until the first swap.
    for(size_t i = 1; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
        {
            VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
            return;
        }
    }
}
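/*
Illustrative sketch (not part of the library): IncrementallySortBlocks() above
does a single bubble-sort step per call, so the block vector converges toward
"least free space first" without a full sort on every allocation or free.
A stand-alone analogue over plain integers:

    #include <vector>
    #include <utility>

    // One call moves at most one adjacent out-of-order pair.
    inline void IncrementalSortStep(std::vector<unsigned>& freeSizes)
    {
        for(size_t i = 1; i < freeSizes.size(); ++i)
        {
            if(freeSizes[i - 1] > freeSizes[i])
            {
                std::swap(freeSizes[i - 1], freeSizes[i]);
                return;
            }
        }
    }
*/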
6587 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6589 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6590 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6591 allocInfo.allocationSize = blockSize;
6592 VkDeviceMemory mem = VK_NULL_HANDLE;
6593 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6602 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6606 allocInfo.allocationSize);
6608 m_Blocks.push_back(pBlock);
6609 if(pNewBlockIndex != VMA_NULL)
6611 *pNewBlockIndex = m_Blocks.size() - 1;
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
6621 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6627 json.WriteString(
"MemoryTypeIndex");
6628 json.WriteNumber(m_MemoryTypeIndex);
6630 json.WriteString(
"BlockSize");
6631 json.WriteNumber(m_PreferredBlockSize);
6633 json.WriteString(
"BlockCount");
6634 json.BeginObject(
true);
6635 if(m_MinBlockCount > 0)
6637 json.WriteString(
"Min");
6638 json.WriteNumber((uint64_t)m_MinBlockCount);
6640 if(m_MaxBlockCount < SIZE_MAX)
6642 json.WriteString(
"Max");
6643 json.WriteNumber((uint64_t)m_MaxBlockCount);
6645 json.WriteString(
"Cur");
6646 json.WriteNumber((uint64_t)m_Blocks.size());
6649 if(m_FrameInUseCount > 0)
6651 json.WriteString(
"FrameInUseCount");
6652 json.WriteNumber(m_FrameInUseCount);
6657 json.WriteString(
"PreferredBlockSize");
6658 json.WriteNumber(m_PreferredBlockSize);
6661 json.WriteString(
"Blocks");
6663 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6665 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
#endif // #if VMA_STATS_STRING_ENABLED

VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6675 VmaAllocator hAllocator,
6676 uint32_t currentFrameIndex)
6678 if(m_pDefragmentator == VMA_NULL)
6680 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6686 return m_pDefragmentator;
6689 VkResult VmaBlockVector::Defragment(
6691 VkDeviceSize& maxBytesToMove,
6692 uint32_t& maxAllocationsToMove)
6694 if(m_pDefragmentator == VMA_NULL)
6699 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6702 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6705 if(pDefragmentationStats != VMA_NULL)
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6711 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6712 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
    m_HasEmptyBlock = false;
6719 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6721 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6722 if(pBlock->m_Metadata.IsEmpty())
6724 if(m_Blocks.size() > m_MinBlockCount)
6726 if(pDefragmentationStats != VMA_NULL)
                    pDefragmentationStats->bytesFreed += pBlock->m_Metadata.GetSize();
6732 VmaVectorRemove(m_Blocks, blockIndex);
6733 pBlock->Destroy(m_hAllocator);
6734 vma_delete(m_hAllocator, pBlock);
                m_HasEmptyBlock = true;
6746 void VmaBlockVector::DestroyDefragmentator()
6748 if(m_pDefragmentator != VMA_NULL)
6750 vma_delete(m_hAllocator, m_pDefragmentator);
6751 m_pDefragmentator = VMA_NULL;
6755 void VmaBlockVector::MakePoolAllocationsLost(
6756 uint32_t currentFrameIndex,
6757 size_t* pLostAllocationCount)
6759 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6760 size_t lostAllocationCount = 0;
6761 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6763 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6765 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6767 if(pLostAllocationCount != VMA_NULL)
6769 *pLostAllocationCount = lostAllocationCount;
6773 void VmaBlockVector::AddStats(
VmaStats* pStats)
6775 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6776 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6778 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6780 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6782 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6784 VMA_HEAVY_ASSERT(pBlock->Validate());
6786 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
6796 VmaDefragmentator::VmaDefragmentator(
6797 VmaAllocator hAllocator,
6798 VmaBlockVector* pBlockVector,
6799 uint32_t currentFrameIndex) :
6800 m_hAllocator(hAllocator),
6801 m_pBlockVector(pBlockVector),
6802 m_CurrentFrameIndex(currentFrameIndex),
6804 m_AllocationsMoved(0),
6805 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6806 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6810 VmaDefragmentator::~VmaDefragmentator()
6812 for(
size_t i = m_Blocks.size(); i--; )
6814 vma_delete(m_hAllocator, m_Blocks[i]);
6818 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6820 AllocationInfo allocInfo;
6821 allocInfo.m_hAllocation = hAlloc;
6822 allocInfo.m_pChanged = pChanged;
6823 m_Allocations.push_back(allocInfo);
6826 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6829 if(m_pMappedDataForDefragmentation)
6831 *ppMappedData = m_pMappedDataForDefragmentation;
6836 if(m_pBlock->m_Mapping.GetMappedData())
6838 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6843 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6844 *ppMappedData = m_pMappedDataForDefragmentation;
6848 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6850 if(m_pMappedDataForDefragmentation != VMA_NULL)
6852 m_pBlock->Unmap(hAllocator, 1);
6856 VkResult VmaDefragmentator::DefragmentRound(
6857 VkDeviceSize maxBytesToMove,
6858 uint32_t maxAllocationsToMove)
6860 if(m_Blocks.empty())
6865 size_t srcBlockIndex = m_Blocks.size() - 1;
6866 size_t srcAllocIndex = SIZE_MAX;
6872 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6874 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6877 if(srcBlockIndex == 0)
6884 srcAllocIndex = SIZE_MAX;
6889 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6893 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6894 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6896 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6897 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6898 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6899 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6902 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6904 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6905 VmaAllocationRequest dstAllocRequest;
6906 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6907 m_CurrentFrameIndex,
6908 m_pBlockVector->GetFrameInUseCount(),
6909 m_pBlockVector->GetBufferImageGranularity(),
6914 &dstAllocRequest) &&
6916 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6918 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6921 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6922 (m_BytesMoved + size > maxBytesToMove))
6924 return VK_INCOMPLETE;
6927 void* pDstMappedData = VMA_NULL;
6928 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6929 if(res != VK_SUCCESS)
6934 void* pSrcMappedData = VMA_NULL;
6935 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6936 if(res != VK_SUCCESS)
6943 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6944 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6945 static_cast<size_t>(size));
6947 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6948 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6950 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6952 if(allocInfo.m_pChanged != VMA_NULL)
6954 *allocInfo.m_pChanged = VK_TRUE;
6957 ++m_AllocationsMoved;
6958 m_BytesMoved += size;
6960 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6968 if(srcAllocIndex > 0)
6974 if(srcBlockIndex > 0)
6977 srcAllocIndex = SIZE_MAX;
6987 VkResult VmaDefragmentator::Defragment(
6988 VkDeviceSize maxBytesToMove,
6989 uint32_t maxAllocationsToMove)
6991 if(m_Allocations.empty())
6997 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6998 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7000 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7001 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7002 m_Blocks.push_back(pBlockInfo);
7006 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
7009 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7011 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7013 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7015 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7016 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7017 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7019 (*it)->m_Allocations.push_back(allocInfo);
7027 m_Allocations.clear();
7029 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7031 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7032 pBlockInfo->CalcHasNonMovableAllocations();
7033 pBlockInfo->SortAllocationsBySizeDescecnding();
7037 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
7040 VkResult result = VK_SUCCESS;
7041 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7043 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7047 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7049 m_Blocks[blockIndex]->Unmap(m_hAllocator);
bool VmaDefragmentator::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
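/*
Illustrative usage sketch (not part of the library): the defragmentator above
is driven through the public vmaDefragment() call. `allocator`, `allocations`
and `allocationCount` are assumed to exist.

    std::vector<VkBool32> changed(allocationCount);
    VmaDefragmentationInfo defragInfo = {};
    defragInfo.maxBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxAllocationsToMove = UINT32_MAX;

    VmaDefragmentationStats stats = {};
    VkResult res = vmaDefragment(
        allocator, allocations.data(), allocationCount, changed.data(), &defragInfo, &stats);
    // For each changed[i] == VK_TRUE, the data was already copied by
    // DefragmentRound(); the resource bound to that allocation must be
    // re-created and bound at the allocation's new memory/offset.
*/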
7080 m_hDevice(pCreateInfo->device),
7081 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7082 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7083 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7084 m_PreferredLargeHeapBlockSize(0),
7085 m_PhysicalDevice(pCreateInfo->physicalDevice),
7086 m_CurrentFrameIndex(0),
7087 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
7098 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7100 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7111 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7112 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
7119 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
7122 if(limit != VK_WHOLE_SIZE)
7124 m_HeapSizeLimit[heapIndex] = limit;
7125 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7127 m_MemProps.memoryHeaps[heapIndex].size = limit;
7133 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7135 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7137 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7143 GetBufferImageGranularity(),
7148 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7152 VmaAllocator_T::~VmaAllocator_T()
7154 VMA_ASSERT(m_Pools.empty());
    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
7163 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7167 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7168 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7169 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7170 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7171 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7172 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7173 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7174 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7175 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7176 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7177 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7178 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7179 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
7180 if(m_UseKhrDedicatedAllocation)
7182 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7183 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7184 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7185 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
7194 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7195 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7196 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7197 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7198 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7199 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7200 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7201 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7202 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7203 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7204 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7205 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7206 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7207 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7208 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7209 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
    }

#undef VMA_COPY_IF_NOT_NULL

    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7217 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7218 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7219 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7220 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7221 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7222 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7223 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7224 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7225 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7226 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7227 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7228 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7229 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7230 if(m_UseKhrDedicatedAllocation)
7232 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
}
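/*
Illustrative usage sketch (not part of the library): when VMA_STATIC_VULKAN_FUNCTIONS
is 0 (e.g. when using a function loader), the pointers validated above are
supplied at allocator creation. `physicalDevice` and `device` are assumed to exist.

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = vkFreeMemory;
    vulkanFunctions.vkMapMemory = vkMapMemory;
    vulkanFunctions.vkUnmapMemory = vkUnmapMemory;
    // ... and so on for the remaining members checked above ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;

    VmaAllocator allocator = VK_NULL_HANDLE;
    vmaCreateAllocator(&allocatorInfo, &allocator);
*/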
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
}
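/*
Worked example for the heuristic above (illustrative only; the actual constants
are defined earlier in this file and may differ between versions): a heap that
is classified as "small", say 256 MiB, gets a preferred block size of
256 MiB / 8 = 32 MiB, so at most 8 such blocks can ever exist in that heap,
while a large device-local heap simply uses m_PreferredLargeHeapBlockSize.
*/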
7245 VkResult VmaAllocator_T::AllocateMemoryOfType(
7246 const VkMemoryRequirements& vkMemReq,
7247 bool dedicatedAllocation,
7248 VkBuffer dedicatedBuffer,
7249 VkImage dedicatedImage,
7251 uint32_t memTypeIndex,
7252 VmaSuballocationType suballocType,
7253 VmaAllocation* pAllocation)
7255 VMA_ASSERT(pAllocation != VMA_NULL);
7256 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
7262 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
7268 VMA_ASSERT(blockVector);
7270 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        vkMemReq.size > preferredBlockSize / 2;
7277 if(preferDedicatedMemory &&
7279 finalCreateInfo.
pool == VK_NULL_HANDLE)
7288 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7292 return AllocateDedicatedMemory(
7306 VkResult res = blockVector->Allocate(
7308 m_CurrentFrameIndex.load(),
7313 if(res == VK_SUCCESS)
7321 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7325 res = AllocateDedicatedMemory(
7331 finalCreateInfo.pUserData,
7335 if(res == VK_SUCCESS)
7338 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7344 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7351 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7353 VmaSuballocationType suballocType,
7354 uint32_t memTypeIndex,
7356 bool isUserDataString,
7358 VkBuffer dedicatedBuffer,
7359 VkImage dedicatedImage,
7360 VmaAllocation* pAllocation)
7362 VMA_ASSERT(pAllocation);
7364 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7365 allocInfo.memoryTypeIndex = memTypeIndex;
7366 allocInfo.allocationSize = size;
7368 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7369 if(m_UseKhrDedicatedAllocation)
7371 if(dedicatedBuffer != VK_NULL_HANDLE)
7373 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7374 dedicatedAllocInfo.buffer = dedicatedBuffer;
7375 allocInfo.pNext = &dedicatedAllocInfo;
7377 else if(dedicatedImage != VK_NULL_HANDLE)
7379 dedicatedAllocInfo.image = dedicatedImage;
7380 allocInfo.pNext = &dedicatedAllocInfo;
7385 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7386 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7389 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7393 void* pMappedData = VMA_NULL;
7396 res = (*m_VulkanFunctions.vkMapMemory)(
7405 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7406 FreeVulkanMemory(memTypeIndex, size, hMemory);
    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
7417 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7418 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7419 VMA_ASSERT(pDedicatedAllocations);
7420 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    VMA_DEBUG_LOG(" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7428 void VmaAllocator_T::GetBufferMemoryRequirements(
7430 VkMemoryRequirements& memReq,
7431 bool& requiresDedicatedAllocation,
7432 bool& prefersDedicatedAllocation)
const 7434 if(m_UseKhrDedicatedAllocation)
7436 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7437 memReqInfo.buffer = hBuffer;
7439 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7441 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7442 memReq2.pNext = &memDedicatedReq;
7444 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7446 memReq = memReq2.memoryRequirements;
7447 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7448 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7452 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
7458 void VmaAllocator_T::GetImageMemoryRequirements(
7460 VkMemoryRequirements& memReq,
7461 bool& requiresDedicatedAllocation,
7462 bool& prefersDedicatedAllocation)
const 7464 if(m_UseKhrDedicatedAllocation)
7466 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7467 memReqInfo.image = hImage;
7469 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7471 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7472 memReq2.pNext = &memDedicatedReq;
7474 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7476 memReq = memReq2.memoryRequirements;
7477 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7478 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7482 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7483 requiresDedicatedAllocation =
false;
7484 prefersDedicatedAllocation =
false;
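/*
Usage sketch (illustrative, not part of the original source): the KHR-dedicated-allocation
queries above are only taken when the allocator was created with
VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT, which in turn requires the application
to enable VK_KHR_get_memory_requirements2 and VK_KHR_dedicated_allocation on the device.
`physicalDevice` and `device` are assumed to have been created by the application.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
*/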
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        return createInfo.pool->m_BlockVector.Allocate(
            createInfo.pool,
            m_CurrentFrameIndex.load(),
            vkMemReq,
            createInfo,
            suballocType,
            pAllocation);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            res = AllocateMemoryOfType(
                vkMemReq,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer, dedicatedImage,
                createInfo, memTypeIndex, suballocType, pAllocation);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    res = AllocateMemoryOfType(
                        vkMemReq,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer, dedicatedImage,
                        createInfo, memTypeIndex, suballocType, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: allocation from this memory type failed too - try the next one.
                }
                else
                {
                    // No other matching memory type index could be found.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation);

    if(allocation->CanBecomeLost() == false ||
        allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                VmaBlockVector* pBlockVector = VMA_NULL;
                VmaPool hPool = allocation->GetPool();
                if(hPool != VK_NULL_HANDLE)
                {
                    pBlockVector = &hPool->m_BlockVector;
                }
                else
                {
                    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                    pBlockVector = m_pBlockVectors[memTypeIndex];
                }
                pBlockVector->Free(allocation);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            FreeDedicatedMemory(allocation);
            break;
        }
    }

    allocation->SetUserData(this, VMA_NULL);
    vma_delete(this, allocation);
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::Defragment(
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    if(pAllocationsChanged != VMA_NULL)
    {
        memset(pAllocationsChanged, 0, sizeof(*pAllocationsChanged));
    }
    if(pDefragmentationStats != VMA_NULL)
    {
        memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    }

    const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();

    VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);

    const size_t poolCount = m_Pools.size();

    // Dispatch pAllocations among defragmentators. Create them in block vectors when necessary.
    for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        VmaAllocation hAlloc = pAllocations[allocIndex];
        const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
        // Only block allocations in HOST_VISIBLE memory that are not lost can be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVector* pAllocBlockVector = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                pAllocBlockVector = &hAllocPool->GetBlockVector();
            }
            // This allocation belongs to a default pool.
            else
            {
                pAllocBlockVector = m_pBlockVectors[memTypeIndex];
            }

            VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);

            VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                &pAllocationsChanged[allocIndex] : VMA_NULL;
            pDefragmentator->AddAllocation(hAlloc, pChanged);
        }
    }

    VkResult result = VK_SUCCESS;

    // Main processing.
    VkDeviceSize maxBytesToMove = SIZE_MAX;
    uint32_t maxAllocationsToMove = UINT32_MAX;
    if(pDefragmentationInfo != VMA_NULL)
    {
        maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
        maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    }

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
        ++memTypeIndex)
    {
        // Only HOST_VISIBLE memory types can be defragmented.
        if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            result = m_pBlockVectors[memTypeIndex]->Defragment(
                pDefragmentationStats,
                maxBytesToMove,
                maxAllocationsToMove);
        }
    }

    // Process custom pools.
    for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    {
        result = m_Pools[poolIndex]->GetBlockVector().Defragment(
            pDefragmentationStats,
            maxBytesToMove,
            maxAllocationsToMove);
    }

    // Destroy defragmentators.
    for(size_t poolIndex = poolCount; poolIndex--; )
    {
        m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    }
    for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    {
        if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
        }
    }

    return result;
}
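/*
Usage sketch (illustrative, not part of the original source): driving the defragmentation
implemented above through the public vmaDefragment() wrapper. Assumes `allocs`/`allocCount`
refer to existing host-visible allocations; any buffers or images bound to allocations that
were moved (allocationsChanged[i] == VK_TRUE) must be destroyed, recreated, and bound again
by the application.

    std::vector<VkBool32> allocationsChanged(allocCount);
    VmaDefragmentationInfo defragInfo = {};
    defragInfo.maxBytesToMove = VK_WHOLE_SIZE;    // effectively no limit
    defragInfo.maxAllocationsToMove = UINT32_MAX; // no limit

    VmaDefragmentationStats stats = {};
    vmaDefragment(allocator, allocs, allocCount, allocationsChanged.data(), &defragInfo, &stats);
    // Inspect stats.bytesMoved, stats.allocationsMoved, stats.deviceMemoryBlocksFreed, ...
*/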
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}

bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // Stripped-down version of GetAllocationInfo: returns whether the allocation is not lost
    // and atomically marks it as used in the current frame.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
        return true;
    }
}
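/*
Usage sketch (illustrative, not part of the original source): the lost-allocation protocol
that GetAllocationInfo()/TouchAllocation() implement. Assumes `allocator`, a per-frame
`frameIndex` counter, and an allocation `alloc` created with
VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT.

    vmaSetCurrentFrameIndex(allocator, frameIndex); // once per frame

    if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
    {
        // The allocation is lost: free it and recreate the resource before using it again.
    }
*/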
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);
    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}

void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    // A heap size limit is in effect for this heap.
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size;
    }
}
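/*
Usage sketch (illustrative, not part of the original source): the m_HeapSizeLimit bookkeeping
above is driven by VmaAllocatorCreateInfo::pHeapSizeLimit. VK_WHOLE_SIZE means "no limit" for
that heap; the 256 MiB cap on heap 0 below is an arbitrary example. `physicalDevice` and
`device` are assumed to exist.

    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        heapSizeLimit[i] = VK_WHOLE_SIZE;
    }
    heapSizeLimit[0] = 256ull * 1024 * 1024;

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pHeapSizeLimit = heapSizeLimit;
*/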
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char *pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}

void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    if(allocation->GetMappedData() != VMA_NULL)
    {
        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    }

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
8086 #if VMA_STATS_STRING_ENABLED 8088 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8090 bool dedicatedAllocationsStarted =
false;
8091 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8093 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8094 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8095 VMA_ASSERT(pDedicatedAllocVector);
8096 if(pDedicatedAllocVector->empty() ==
false)
8098 if(dedicatedAllocationsStarted ==
false)
8100 dedicatedAllocationsStarted =
true;
8101 json.WriteString(
"DedicatedAllocations");
8105 json.BeginString(
"Type ");
8106 json.ContinueString(memTypeIndex);
8111 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8113 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
8114 json.BeginObject(
true);
8116 json.WriteString(
"Type");
8117 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8119 json.WriteString(
"Size");
8120 json.WriteNumber(hAlloc->GetSize());
8122 const void* pUserData = hAlloc->GetUserData();
8123 if(pUserData != VMA_NULL)
8125 json.WriteString(
"UserData");
8126 if(hAlloc->IsUserDataString())
8128 json.WriteString((
const char*)pUserData);
8133 json.ContinueString_Pointer(pUserData);
8144 if(dedicatedAllocationsStarted)
8150 bool allocationsStarted =
false;
8151 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8153 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8155 if(allocationsStarted ==
false)
8157 allocationsStarted =
true;
8158 json.WriteString(
"DefaultPools");
8162 json.BeginString(
"Type ");
8163 json.ContinueString(memTypeIndex);
8166 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8169 if(allocationsStarted)
8176 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8177 const size_t poolCount = m_Pools.size();
8180 json.WriteString(
"Pools");
8182 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8184 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
#endif // #if VMA_STATS_STRING_ENABLED

static VkResult AllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    return allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pAllocationCreateInfo,
        suballocType,
        pAllocation);
}

VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return VK_SUCCESS;
}

void vmaDestroyAllocator(VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
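/*
Usage sketch (illustrative, not part of the original source): querying statistics through the
wrappers above. VmaStats gives per-type and per-heap numbers; vmaBuildStatsString (available
when VMA_STATS_STRING_ENABLED is 1) produces a JSON dump for offline inspection.

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    // e.g. stats.total.usedBytes, stats.memoryHeap[0].allocationCount, ...

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE);
    // ... write statsString to a file ...
    vmaFreeStatsString(allocator, statsString);
*/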
8290 #if VMA_STATS_STRING_ENABLED 8293 VmaAllocator allocator,
8294 char** ppStatsString,
8295 VkBool32 detailedMap)
8297 VMA_ASSERT(allocator && ppStatsString);
8298 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8300 VmaStringBuilder sb(allocator);
8302 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8306 allocator->CalculateStats(&stats);
8308 json.WriteString(
"Total");
8309 VmaPrintStatInfo(json, stats.
total);
8311 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8313 json.BeginString(
"Heap ");
8314 json.ContinueString(heapIndex);
8318 json.WriteString(
"Size");
8319 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8321 json.WriteString(
"Flags");
8322 json.BeginArray(
true);
8323 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8325 json.WriteString(
"DEVICE_LOCAL");
8331 json.WriteString(
"Stats");
8332 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8335 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8337 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8339 json.BeginString(
"Type ");
8340 json.ContinueString(typeIndex);
8345 json.WriteString(
"Flags");
8346 json.BeginArray(
true);
8347 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8348 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8350 json.WriteString(
"DEVICE_LOCAL");
8352 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8354 json.WriteString(
"HOST_VISIBLE");
8356 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8358 json.WriteString(
"HOST_COHERENT");
8360 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8362 json.WriteString(
"HOST_CACHED");
8364 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8366 json.WriteString(
"LAZILY_ALLOCATED");
8372 json.WriteString(
"Stats");
8373 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8382 if(detailedMap == VK_TRUE)
8384 allocator->PrintDetailedMap(json);
8390 const size_t len = sb.GetLength();
8391 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8394 memcpy(pChars, sb.GetData(), len);
8397 *ppStatsString = pChars;
}

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;

    // Convert usage to required/preferred flags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_GPU_ONLY:
        preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost is the number of preferredFlags bits missing from this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
                // Remember the memory type with the lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);
    VMA_DEBUG_LOG("vmaCreatePool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->CreatePool(pCreateInfo, pPool);
}

void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator);
    if(pool == VK_NULL_HANDLE)
    {
        return;
    }
    VMA_DEBUG_LOG("vmaDestroyPool");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->DestroyPool(pool);
}

void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetPoolStats(pool, pPoolStats);
}

void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
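/*
Usage sketch (illustrative, not part of the original source): creating a custom pool for a
particular kind of buffer. The buffer parameters and pool sizes below are arbitrary examples.

    VkBufferCreateInfo exampleBufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    exampleBufCreateInfo.size = 1024;
    exampleBufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    uint32_t memTypeIndex;
    vmaFindMemoryTypeIndexForBufferInfo(allocator, &exampleBufCreateInfo, &allocCreateInfo, &memTypeIndex);

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 128ull * 1024 * 1024;
    poolCreateInfo.maxBlockCount = 2;

    VmaPool pool;
    vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... allocate with allocCreateInfo.pool = pool ...
    vmaDestroyPool(allocator, pool);
*/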
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        pAllocation);

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        pAllocation);

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = AllocateMemoryForImage(
        allocator,
        image,
        pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        pAllocation);

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_LOG("vmaFreeMemory");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->FreeMemory(allocation);
}
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->TouchAllocation(allocation);
}

void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocation->SetUserData(allocator, pUserData);
}

void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK;
    allocator->CreateLostAllocation(pAllocation);
}

VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->Map(allocation, ppData);
}

void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->Unmap(allocation);
}

VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    VMA_ASSERT(allocator && pAllocations);
    VMA_DEBUG_LOG("vmaDefragment");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
}
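/*
Usage sketch (illustrative, not part of the original source): filling a host-visible
allocation through vmaMapMemory()/vmaUnmapMemory(). Assumes `allocator`, an `allocation` in
HOST_VISIBLE memory (e.g. created with VMA_MEMORY_USAGE_CPU_TO_GPU), and `srcData`/`srcSize`.

    void* mappedData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &mappedData);
    if(res == VK_SUCCESS)
    {
        memcpy(mappedData, srcData, (size_t)srcSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/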
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
    VMA_DEBUG_LOG("vmaCreateBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            pAllocation);
        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
                allocator->m_hDevice,
                *pBuffer,
                (*pAllocation)->GetMemory(),
                (*pAllocation)->GetOffset());
            if(res >= 0)
            {
                // All steps succeeded.
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
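/*
Usage sketch (illustrative, not part of the original source): the typical way vmaCreateBuffer()
is called. Buffer size and usage below are arbitrary examples.

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/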
void vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    if(buffer != VK_NULL_HANDLE)
    {
        VMA_ASSERT(allocator);
        VMA_DEBUG_LOG("vmaDestroyBuffer");
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());

        allocator->FreeMemory(allocation);
    }
}

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    VMA_DEBUG_LOG("vmaCreateImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
                allocator->m_hDevice,
                *pImage,
                (*pAllocation)->GetMemory(),
                (*pAllocation)->GetOffset());
            if(res >= 0)
            {
                // All steps succeeded.
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

void vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    if(image != VK_NULL_HANDLE)
    {
        VMA_ASSERT(allocator);
        VMA_DEBUG_LOG("vmaDestroyImage");
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());

        allocator->FreeMemory(allocation);
    }
}
8997 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:938
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1192
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:968
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:963
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:953
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:948
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1154
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:947
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1503
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:965
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1678
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1373
+
Definition: vk_mem_alloc.h:1149
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:942
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1498
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:960
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1697
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1368
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1427
-
Definition: vk_mem_alloc.h:1234
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:936
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1272
-
Definition: vk_mem_alloc.h:1181
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:977
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1422
+
Definition: vk_mem_alloc.h:1229
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:931
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1267
+
Definition: vk_mem_alloc.h:1176
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:972
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1030
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:962
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1025
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:957
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1185
-
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1095
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:950
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1094
-
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:958
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1682
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1180
+
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1090
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:945
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1089
+
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:953
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1701
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:994
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1104
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1690
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1256
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1673
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:951
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:878
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:971
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1381
-
Definition: vk_mem_alloc.h:1375
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1513
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:989
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1099
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1709
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1251
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1692
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:946
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:873
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:966
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1376
+
Definition: vk_mem_alloc.h:1370
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1508
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:948
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1293
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1397
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1433
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:943
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1288
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1392
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1428
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:934
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1384
+
Definition: vk_mem_alloc.h:929
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1379
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1132
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1127
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1668
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1687
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1686
-
Definition: vk_mem_alloc.h:1171
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1280
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:949
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1705
+
Definition: vk_mem_alloc.h:1166
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1275
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:944
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1100
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:884
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1095
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:879
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:905
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:900
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:910
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1688
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:905
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1707
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1267
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1443
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1262
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1438
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:944
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1083
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1392
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:897
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:939
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1078
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1387
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:892
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1241
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1096
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:901
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1387
-
Definition: vk_mem_alloc.h:1180
+
Definition: vk_mem_alloc.h:1236
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1091
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:896
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1382
+
Definition: vk_mem_alloc.h:1175
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1262
-
Definition: vk_mem_alloc.h:1253
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1257
+
Definition: vk_mem_alloc.h:1248
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1086
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:946
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1405
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:980
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1436
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1251
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1286
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1081
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:941
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1400
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:975
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1431
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1246
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1281
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1018
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1102
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1221
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1095
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1013
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1097
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1216
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1090
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:955
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:899
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:954
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:950
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:894
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:949
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1419
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1414
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1527
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:974
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1095
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1092
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1522
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:969
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1090
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1087
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1424
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1508
-
Definition: vk_mem_alloc.h:1249
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1684
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:942
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1419
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1503
+
Definition: vk_mem_alloc.h:1244
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1703
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:937
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:957
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1090
-
Definition: vk_mem_alloc.h:1137
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1377
+
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:952
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1085
+
Definition: vk_mem_alloc.h:1132
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1372
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1083
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:947
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:951
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1203
Definition: vk_mem_alloc.h:1159
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1517
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
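This is the usual counterpart of vmaCreateImage(), which creates the VkImage, allocates memory for it and binds the two in one call. A sketch assuming a simple sampled 2D texture for which VMA_MEMORY_USAGE_GPU_ONLY is appropriate; error handling omitted and CreateUseAndDestroyImage is a placeholder name.

void CreateUseAndDestroyImage(VmaAllocator allocator)
{
    VkImageCreateInfo imageCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imageCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageCreateInfo.extent.width = 1024;
    imageCreateInfo.extent.height = 1024;
    imageCreateInfo.extent.depth = 1;
    imageCreateInfo.mipLevels = 1;
    imageCreateInfo.arrayLayers = 1;
    imageCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
    // sharingMode and initialLayout stay zero-initialized (EXCLUSIVE, UNDEFINED).

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY; // prefer device-local memory

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    vmaCreateImage(allocator, &imageCreateInfo, &allocCreateInfo, &image, &allocation, NULL);

    // ... upload data and sample the image ...

    // One call destroys the VkImage and frees its memory.
    vmaDestroyImage(allocator, image, allocation);
}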
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:927
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
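A sketch of the corresponding call, assuming the vmaDefragment() parameter order declared in this header and two already-created host-visible allocations (alloc0 and alloc1 are placeholders). Resources bound to allocations reported as changed must be re-bound or re-created afterwards.

void DefragmentTwoAllocations(VmaAllocator allocator, VmaAllocation alloc0, VmaAllocation alloc1)
{
    VmaAllocation allocations[2] = { alloc0, alloc1 };
    VkBool32 allocationsChanged[2] = { VK_FALSE, VK_FALSE };

    VmaDefragmentationStats stats = {};
    // NULL for the VmaDefragmentationInfo pointer means "use default limits".
    vmaDefragment(allocator, allocations, 2, allocationsChanged, NULL, &stats);

    // stats.bytesMoved reports how much data was copied; for every
    // allocationsChanged[i] == VK_TRUE the old VkBuffer/VkImage binding is stale.
}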
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:940
Parameters of VmaAllocation objects that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1484
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
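A sketch of the buffer path, assuming device and allocator already exist (error handling omitted; AllocateAndBindVertexBuffer is a placeholder name): the buffer is created with plain Vulkan, memory is allocated for it, and the returned VmaAllocationInfo supplies the (deviceMemory, offset) pair for binding.

void AllocateAndBindVertexBuffer(VmaAllocator allocator, VkDevice device)
{
    VkBufferCreateInfo bufferCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferCreateInfo.size = 65536;
    bufferCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VkBuffer buffer = VK_NULL_HANDLE;
    vkCreateBuffer(device, &bufferCreateInfo, NULL, &buffer);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocationInfo = {};
    vmaAllocateMemoryForBuffer(allocator, buffer, &allocCreateInfo, &allocation, &allocationInfo);

    // Bind using the memory block and offset chosen by the allocator.
    vkBindBufferMemory(device, buffer, allocationInfo.deviceMemory, allocationInfo.offset);

    // Cleanup, in reverse order:
    // vkDestroyBuffer(device, buffer, NULL); vmaFreeMemory(allocator, allocation);
}

vmaSetCurrentFrameIndex() is typically called once at the start of every frame so that the allocator's per-frame bookkeeping (used, among other things, by lost-allocation tracking, see vmaTouchAllocation() below) knows which frame is current.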
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1350
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1091
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1242
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1098
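The vmaTouchAllocation() entry above is meant for allocations created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT. A per-frame usage sketch; frameIndex and the resource-recreation step are placeholders.

void UseLostCapableAllocation(VmaAllocator allocator, VmaAllocation allocation, uint32_t frameIndex)
{
    // Tell the allocator which frame is current so "last used in frame" bookkeeping works.
    vmaSetCurrentFrameIndex(allocator, frameIndex);

    if(vmaTouchAllocation(allocator, allocation) == VK_TRUE)
    {
        // Still valid and now marked as used in this frame; safe to record commands with it.
    }
    else
    {
        // The allocation was lost; the resource has to be re-created before use.
    }
}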
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
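Typically paired with vmaCreateBuffer(), which performs creation, allocation and binding in one call. A sketch assuming a persistently mapped uniform buffer via VMA_ALLOCATION_CREATE_MAPPED_BIT (flag availability depends on the library version); error handling omitted and CreateFillAndDestroyUniformBuffer is a placeholder name.

#include <string.h>

void CreateFillAndDestroyUniformBuffer(VmaAllocator allocator, const void* data, size_t dataSize)
{
    VkBufferCreateInfo bufferCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferCreateInfo.size = dataSize;
    bufferCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;      // host-visible memory
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT; // keep it persistently mapped

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VmaAllocationInfo allocationInfo = {};
    vmaCreateBuffer(allocator, &bufferCreateInfo, &allocCreateInfo, &buffer, &allocation, &allocationInfo);

    // pMappedData stays valid for the allocation's lifetime when the MAPPED flag is used.
    memcpy(allocationInfo.pMappedData, data, dataSize);

    // One call destroys the buffer and frees its memory.
    vmaDestroyBuffer(allocator, buffer, allocation);
}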
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1425
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1091
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1489