#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#include <vulkan/vulkan.h>

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);

/// Pointers to some Vulkan functions - a subset used by the library.
typedef struct VmaVulkanFunctions
{
    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties;
    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties;
    PFN_vkAllocateMemory vkAllocateMemory;
    PFN_vkFreeMemory vkFreeMemory;
    PFN_vkMapMemory vkMapMemory;
    PFN_vkUnmapMemory vkUnmapMemory;
    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges;
    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges;
    PFN_vkBindBufferMemory vkBindBufferMemory;
    PFN_vkBindImageMemory vkBindImageMemory;
    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements;
    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements;
    PFN_vkCreateBuffer vkCreateBuffer;
    PFN_vkDestroyBuffer vkDestroyBuffer;
    PFN_vkCreateImage vkCreateImage;
    PFN_vkDestroyImage vkDestroyImage;
#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
} VmaVulkanFunctions;

#ifndef VMA_RECORDING_ENABLED
    #ifdef _WIN32
        #define VMA_RECORDING_ENABLED 1
    #else
        #define VMA_RECORDING_ENABLED 0
    #endif
#endif

void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);

#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

void vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

#if VMA_USE_STL_CONTAINERS
   #define VMA_USE_STL_VECTOR 1
   #define VMA_USE_STL_UNORDERED_MAP 1
   #define VMA_USE_STL_LIST 1
#endif

#if VMA_USE_STL_VECTOR
   #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
   #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
   #include <list>
#endif

#include <algorithm>

#define VMA_NULL   nullptr

#if defined(__APPLE__) || defined(__ANDROID__)
#include <cstdlib> // for aligned_alloc()

// aligned_alloc() is not available on these platforms, so emulate it with posix_memalign().
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
    {
        return pointer;
    }
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
   #ifdef _DEBUG
       #define VMA_ASSERT(expr)         assert(expr)
   #else
       #define VMA_ASSERT(expr)
   #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
   #ifdef _DEBUG
       #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
   #else
       #define VMA_HEAVY_ASSERT(expr)
   #endif
#endif

#ifndef VMA_ALIGN_OF
   #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
   #if defined(_WIN32)
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
   #else
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size) ))
   #endif
#endif

#ifndef VMA_SYSTEM_FREE
   #if defined(_WIN32)
       #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
   #else
       #define VMA_SYSTEM_FREE(ptr)   free(ptr)
   #endif
#endif

#ifndef VMA_MIN
   #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
   #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
   #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
   #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
   #define VMA_DEBUG_LOG(format, ...)
#endif

#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
    // Set to 1 to use the best-fit strategy when searching free suballocations.
    #define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation gets its own VkDeviceMemory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define to 1 to fill the contents of new and freed allocations with a bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define to 1, together with a nonzero VMA_DEBUG_MARGIN, to write and validate magic values in the margins.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set to 1 to synchronize all allocator calls with a single global mutex. Debugging purposes only.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value of VkPhysicalDeviceLimits::bufferImageGranularity used by the library.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
   // 1 GiB
   #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
   // 256 MiB
   #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
        className(const className&) = delete; \
        className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

/*
END OF CONFIGURATION
*/

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
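/*
Illustrative cross-check (not from the original source): VmaCountBitsSet() is a
branch-free population count, used for example to count how many candidate memory types
remain in a memoryTypeBits mask. A naive equivalent, assuming only <cstdint>:

    static inline uint32_t CountBitsNaive(uint32_t v) // hypothetical helper
    {
        uint32_t c = 0;
        for(; v != 0; v >>= 1)
        {
            c += v & 1u;
        }
        return c;
    }
    // CountBitsNaive(0x000000F1u) == VmaCountBitsSet(0x000000F1u) == 5
*/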
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
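/*
Worked examples (illustrative, not from the original source): the helpers above use only
integer division and multiplication, so they are exact for any positive align value; VMA
typically passes powers of two coming from Vulkan alignment requirements.

    VmaAlignUp<uint32_t>(13, 8)   == 16   // (13 + 7) / 8 * 8
    VmaAlignUp<uint32_t>(16, 8)   == 16   // already aligned values are unchanged
    VmaAlignDown<uint32_t>(13, 8) == 8    // 13 / 8 * 8
    VmaRoundDiv<uint32_t>(7, 2)   == 4    // (7 + 1) / 2, rounded to nearest
*/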
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT

/*
Returns true if two memory blocks occupy overlapping pages.
ResourceA must be in less memory offset than ResourceB.

Algorithm is based on "Vulkan 1.0.39 - A Specification (with all registered Vulkan extensions)"
chapter 11.6 "Resource Memory Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
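/*
Worked example (illustrative, not from the original source): pageSize comes from
VkPhysicalDeviceLimits::bufferImageGranularity and is a power of two, so masking with
~(pageSize - 1) rounds an address down to the start of its "page".

    // pageSize = 1024:
    //   A at offset 0, size 512 -> last byte 511, page 0; B at offset 512 -> page 0
    //   VmaBlocksOnSamePage(0, 512, 512, 1024)  == true   (granularity conflict possible)
    //   A at offset 0, size 512 -> page 0;       B at offset 1024 -> page 1
    //   VmaBlocksOnSamePage(0, 512, 1024, 1024) == false  (different pages, no conflict)
*/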
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};

/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one of them is
buffer or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
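/*
Usage sketch (illustrative, not from the original source): this check is what forces a
"linear" resource (buffer or linear image) and an optimal-tiling image that land in the
same block onto separate bufferImageGranularity pages.

    // Deciding whether the second of two neighboring suballocations needs extra alignment:
    const VkDeviceSize bufferImageGranularity = 1024; // from VkPhysicalDeviceLimits, assumed value
    if(VmaIsBufferImageGranularityConflict(VMA_SUBALLOCATION_TYPE_BUFFER,
                                           VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL))
    {
        // align the optimal image up to the next multiple of bufferImageGranularity,
        // e.g. with VmaAlignUp(offset, bufferImageGranularity)
    }
*/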
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
    return true;
}
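/*
Sketch of the intended use (illustrative, not from the original source): with a nonzero
VMA_DEBUG_MARGIN and VMA_DEBUG_DETECT_CORRUPTION enabled, the allocator writes the magic
pattern into the margin that follows each allocation and re-validates it later.

    // Assuming pBlockData points at the mapped start of a block and the allocation
    // occupies [allocOffset, allocOffset + allocSize):
    //   VmaWriteMagicValue(pBlockData, allocOffset + allocSize);        // fill the margin
    //   ...
    //   if(!VmaValidateMagicValue(pBlockData, allocOffset + allocSize))
    //   {
    //       VMA_ASSERT(0 && "Memory corruption detected in the allocation margin!");
    //   }
*/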
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    {
        if(m_pMutex)
        {
            m_pMutex->Lock();
        }
    }

    ~VmaMutexLock()
    {
        if(m_pMutex)
        {
            m_pMutex->Unlock();
        }
    }

private:
    VMA_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
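/*
Usage sketch (illustrative, not from the original source): VmaBinaryFindFirstNotLess is
the library's lower_bound over a sorted range; the comparator must implement strict
"less than".

    #include <functional> // for std::less, only needed by this sketch

    VkDeviceSize sizes[] = { 16, 64, 256, 1024 };          // sorted ascending
    const VkDeviceSize* it = VmaBinaryFindFirstNotLess(
        sizes, sizes + 4, (VkDeviceSize)100, std::less<VkDeviceSize>());
    // it points at 256: the first element that is not less than 100,
    // i.e. the place where 100 would be inserted to keep the range sorted.
*/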
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}

template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
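/*
Usage sketch (illustrative, not from the original source): every internal CPU-side
allocation goes through these helpers, so user-provided VkAllocationCallbacks are honored
and the system aligned allocator is only a fallback.

    struct Example { int x; };                             // hypothetical type

    const VkAllocationCallbacks* callbacks = VMA_NULL;     // or user-provided callbacks
    Example* one  = vma_new(callbacks, Example);           // placement-new on VmaAllocate
    Example* many = vma_new_array(callbacks, Example, 8);  // space for 8 items; the macro
                                                           // placement-constructs only the first
    vma_delete(callbacks, one);                            // ~Example() + VmaFree
    vma_delete_array(callbacks, many, 8);                  // destructors in reverse order + VmaFree
*/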
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
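/*
Usage sketch (illustrative, not from the original source): VmaStlAllocator models the
minimal C++11 Allocator interface (value_type, allocate, deallocate, equality), so it can
back the library's own VmaVector, or std::vector when VMA_USE_STL_VECTOR is enabled.

    #include <vector> // only needed by this sketch

    const VkAllocationCallbacks* callbacks = VMA_NULL;   // or user-provided callbacks
    VmaStlAllocator<uint32_t> alloc(callbacks);
    std::vector<uint32_t, VmaStlAllocator<uint32_t> > indices(alloc);
    indices.push_back(42); // element storage is obtained through VmaMalloc / VmaFree
*/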
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3128 #else // #if VMA_USE_STL_VECTOR 3133 template<
typename T,
typename AllocatorT>
3137 typedef T value_type;
3139 VmaVector(
const AllocatorT& allocator) :
3140 m_Allocator(allocator),
3147 VmaVector(
size_t count,
const AllocatorT& allocator) :
3148 m_Allocator(allocator),
3149 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3155 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3156 m_Allocator(src.m_Allocator),
3157 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3158 m_Count(src.m_Count),
3159 m_Capacity(src.m_Count)
3163 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3169 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3172 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3176 resize(rhs.m_Count);
3179 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3185 bool empty()
const {
return m_Count == 0; }
3186 size_t size()
const {
return m_Count; }
3187 T* data() {
return m_pArray; }
3188 const T* data()
const {
return m_pArray; }
3190 T& operator[](
size_t index)
3192 VMA_HEAVY_ASSERT(index < m_Count);
3193 return m_pArray[index];
3195 const T& operator[](
size_t index)
const 3197 VMA_HEAVY_ASSERT(index < m_Count);
3198 return m_pArray[index];
3203 VMA_HEAVY_ASSERT(m_Count > 0);
3206 const T& front()
const 3208 VMA_HEAVY_ASSERT(m_Count > 0);
3213 VMA_HEAVY_ASSERT(m_Count > 0);
3214 return m_pArray[m_Count - 1];
3216 const T& back()
const 3218 VMA_HEAVY_ASSERT(m_Count > 0);
3219 return m_pArray[m_Count - 1];
3222 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3224 newCapacity = VMA_MAX(newCapacity, m_Count);
3226 if((newCapacity < m_Capacity) && !freeMemory)
3228 newCapacity = m_Capacity;
3231 if(newCapacity != m_Capacity)
3233 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3236 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3238 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3239 m_Capacity = newCapacity;
3240 m_pArray = newArray;
3244 void resize(
size_t newCount,
bool freeMemory =
false)
3246 size_t newCapacity = m_Capacity;
3247 if(newCount > m_Capacity)
3249 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3253 newCapacity = newCount;
3256 if(newCapacity != m_Capacity)
3258 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3259 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3260 if(elementsToCopy != 0)
3262 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3264 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3265 m_Capacity = newCapacity;
3266 m_pArray = newArray;
3272 void clear(
bool freeMemory =
false)
3274 resize(0, freeMemory);
3277 void insert(
size_t index,
const T& src)
3279 VMA_HEAVY_ASSERT(index <= m_Count);
3280 const size_t oldCount = size();
3281 resize(oldCount + 1);
3282 if(index < oldCount)
3284 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3286 m_pArray[index] = src;
3289 void remove(
size_t index)
3291 VMA_HEAVY_ASSERT(index < m_Count);
3292 const size_t oldCount = size();
3293 if(index < oldCount - 1)
3295 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3297 resize(oldCount - 1);
3300 void push_back(
const T& src)
3302 const size_t newIndex = size();
3303 resize(newIndex + 1);
3304 m_pArray[newIndex] = src;
3309 VMA_HEAVY_ASSERT(m_Count > 0);
3313 void push_front(
const T& src)
3320 VMA_HEAVY_ASSERT(m_Count > 0);
3324 typedef T* iterator;
3326 iterator begin() {
return m_pArray; }
3327 iterator end() {
return m_pArray + m_Count; }
3330 AllocatorT m_Allocator;
3336 template<
typename T,
typename allocatorT>
3337 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3339 vec.insert(index, item);
3342 template<
typename T,
typename allocatorT>
3343 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
#endif // #if VMA_USE_STL_VECTOR

template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}

template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if(it != vector.data() + vector.size() && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    else
    {
        return vector.size();
    }
}
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock);
    ~VmaPoolAllocator();
    void Clear();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        T Value;
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    size_t m_ItemsPerBlock;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, size_t itemsPerBlock) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemsPerBlock(itemsPerBlock),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(itemsPerBlock > 0);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    Clear();
}

template<typename T>
void VmaPoolAllocator<T>::Clear()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            return &pItem->Value;
        }
    }

    // No block has a free item: create a new block and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    return &pItem->Value;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = 0; i < m_ItemBlocks.size(); ++i)
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
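/*
Usage sketch (illustrative, not from the original source): VmaPoolAllocator threads a
free list through the unused slots of fixed-size blocks (the union reuses an item's
storage as the NextFreeIndex link), so allocating and freeing an item never touches the
system heap except when a whole new block is needed.

    struct Node { uint64_t payload; };                 // hypothetical item type
    const VkAllocationCallbacks* callbacks = VMA_NULL; // or user-provided callbacks
    VmaPoolAllocator<Node> pool(callbacks, 128);       // 128 items per block
    Node* a = pool.Alloc();                            // takes the first free slot
    Node* b = pool.Alloc();
    pool.Free(a);                                      // slot becomes the new free-list head
    pool.Free(b);
    // pool.Clear() or the destructor releases all blocks at once; note that Alloc() above
    // does not run T's constructor, so this is intended for trivially constructible items.
*/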
3525 #if VMA_USE_STL_LIST 3527 #define VmaList std::list 3529 #else // #if VMA_USE_STL_LIST 3531 template<
typename T>
3540 template<
typename T>
3543 VMA_CLASS_NO_COPY(VmaRawList)
3545 typedef VmaListItem<T> ItemType;
3547 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3551 size_t GetCount()
const {
return m_Count; }
3552 bool IsEmpty()
const {
return m_Count == 0; }
3554 ItemType* Front() {
return m_pFront; }
3555 const ItemType* Front()
const {
return m_pFront; }
3556 ItemType* Back() {
return m_pBack; }
3557 const ItemType* Back()
const {
return m_pBack; }
3559 ItemType* PushBack();
3560 ItemType* PushFront();
3561 ItemType* PushBack(
const T& value);
3562 ItemType* PushFront(
const T& value);
3567 ItemType* InsertBefore(ItemType* pItem);
3569 ItemType* InsertAfter(ItemType* pItem);
3571 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3572 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3574 void Remove(ItemType* pItem);
3577 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3578 VmaPoolAllocator<ItemType> m_ItemAllocator;
3584 template<
typename T>
3585 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3586 m_pAllocationCallbacks(pAllocationCallbacks),
3587 m_ItemAllocator(pAllocationCallbacks, 128),
3594 template<
typename T>
3595 VmaRawList<T>::~VmaRawList()
3601 template<
typename T>
3602 void VmaRawList<T>::Clear()
3604 if(IsEmpty() ==
false)
3606 ItemType* pItem = m_pBack;
3607 while(pItem != VMA_NULL)
3609 ItemType*
const pPrevItem = pItem->pPrev;
3610 m_ItemAllocator.Free(pItem);
3613 m_pFront = VMA_NULL;
3619 template<
typename T>
3620 VmaListItem<T>* VmaRawList<T>::PushBack()
3622 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3623 pNewItem->pNext = VMA_NULL;
3626 pNewItem->pPrev = VMA_NULL;
3627 m_pFront = pNewItem;
3633 pNewItem->pPrev = m_pBack;
3634 m_pBack->pNext = pNewItem;
3641 template<
typename T>
3642 VmaListItem<T>* VmaRawList<T>::PushFront()
3644 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3645 pNewItem->pPrev = VMA_NULL;
3648 pNewItem->pNext = VMA_NULL;
3649 m_pFront = pNewItem;
3655 pNewItem->pNext = m_pFront;
3656 m_pFront->pPrev = pNewItem;
3657 m_pFront = pNewItem;
3663 template<
typename T>
3664 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3666 ItemType*
const pNewItem = PushBack();
3667 pNewItem->Value = value;
3671 template<
typename T>
3672 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3674 ItemType*
const pNewItem = PushFront();
3675 pNewItem->Value = value;
3679 template<
typename T>
3680 void VmaRawList<T>::PopBack()
3682 VMA_HEAVY_ASSERT(m_Count > 0);
3683 ItemType*
const pBackItem = m_pBack;
3684 ItemType*
const pPrevItem = pBackItem->pPrev;
3685 if(pPrevItem != VMA_NULL)
3687 pPrevItem->pNext = VMA_NULL;
3689 m_pBack = pPrevItem;
3690 m_ItemAllocator.Free(pBackItem);
3694 template<
typename T>
3695 void VmaRawList<T>::PopFront()
3697 VMA_HEAVY_ASSERT(m_Count > 0);
3698 ItemType*
const pFrontItem = m_pFront;
3699 ItemType*
const pNextItem = pFrontItem->pNext;
3700 if(pNextItem != VMA_NULL)
3702 pNextItem->pPrev = VMA_NULL;
3704 m_pFront = pNextItem;
3705 m_ItemAllocator.Free(pFrontItem);
3709 template<
typename T>
3710 void VmaRawList<T>::Remove(ItemType* pItem)
3712 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3713 VMA_HEAVY_ASSERT(m_Count > 0);
3715 if(pItem->pPrev != VMA_NULL)
3717 pItem->pPrev->pNext = pItem->pNext;
3721 VMA_HEAVY_ASSERT(m_pFront == pItem);
3722 m_pFront = pItem->pNext;
3725 if(pItem->pNext != VMA_NULL)
3727 pItem->pNext->pPrev = pItem->pPrev;
3731 VMA_HEAVY_ASSERT(m_pBack == pItem);
3732 m_pBack = pItem->pPrev;
3735 m_ItemAllocator.Free(pItem);
3739 template<
typename T>
3740 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3742 if(pItem != VMA_NULL)
3744 ItemType*
const prevItem = pItem->pPrev;
3745 ItemType*
const newItem = m_ItemAllocator.Alloc();
3746 newItem->pPrev = prevItem;
3747 newItem->pNext = pItem;
3748 pItem->pPrev = newItem;
3749 if(prevItem != VMA_NULL)
3751 prevItem->pNext = newItem;
3755 VMA_HEAVY_ASSERT(m_pFront == pItem);
3765 template<
typename T>
3766 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3768 if(pItem != VMA_NULL)
3770 ItemType*
const nextItem = pItem->pNext;
3771 ItemType*
const newItem = m_ItemAllocator.Alloc();
3772 newItem->pNext = nextItem;
3773 newItem->pPrev = pItem;
3774 pItem->pNext = newItem;
3775 if(nextItem != VMA_NULL)
3777 nextItem->pPrev = newItem;
3781 VMA_HEAVY_ASSERT(m_pBack == pItem);
3791 template<
typename T>
3792 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3794 ItemType*
const newItem = InsertBefore(pItem);
3795 newItem->Value = value;
3799 template<
typename T>
3800 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3802 ItemType*
const newItem = InsertAfter(pItem);
3803 newItem->Value = value;
3807 template<
typename T,
typename AllocatorT>
3810 VMA_CLASS_NO_COPY(VmaList)
3821 T& operator*()
const 3823 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3824 return m_pItem->Value;
3826 T* operator->()
const 3828 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3829 return &m_pItem->Value;
3832 iterator& operator++()
3834 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3835 m_pItem = m_pItem->pNext;
3838 iterator& operator--()
3840 if(m_pItem != VMA_NULL)
3842 m_pItem = m_pItem->pPrev;
3846 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3847 m_pItem = m_pList->Back();
3852 iterator operator++(
int)
3854 iterator result = *
this;
3858 iterator operator--(
int)
3860 iterator result = *
this;
3865 bool operator==(
const iterator& rhs)
const 3867 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3868 return m_pItem == rhs.m_pItem;
3870 bool operator!=(
const iterator& rhs)
const 3872 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3873 return m_pItem != rhs.m_pItem;
3877 VmaRawList<T>* m_pList;
3878 VmaListItem<T>* m_pItem;
3880 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3886 friend class VmaList<T, AllocatorT>;
3889 class const_iterator
3898 const_iterator(
const iterator& src) :
3899 m_pList(src.m_pList),
3900 m_pItem(src.m_pItem)
3904 const T& operator*()
const 3906 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3907 return m_pItem->Value;
3909 const T* operator->()
const 3911 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3912 return &m_pItem->Value;
3915 const_iterator& operator++()
3917 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3918 m_pItem = m_pItem->pNext;
3921 const_iterator& operator--()
3923 if(m_pItem != VMA_NULL)
3925 m_pItem = m_pItem->pPrev;
3929 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3930 m_pItem = m_pList->Back();
3935 const_iterator operator++(
int)
3937 const_iterator result = *
this;
3941 const_iterator operator--(
int)
3943 const_iterator result = *
this;
3948 bool operator==(
const const_iterator& rhs)
const 3950 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3951 return m_pItem == rhs.m_pItem;
3953 bool operator!=(
const const_iterator& rhs)
const 3955 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3956 return m_pItem != rhs.m_pItem;
3960 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3966 const VmaRawList<T>* m_pList;
3967 const VmaListItem<T>* m_pItem;
3969 friend class VmaList<T, AllocatorT>;
3972 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3974 bool empty()
const {
return m_RawList.IsEmpty(); }
3975 size_t size()
const {
return m_RawList.GetCount(); }
3977 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3978 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3980 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3981 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3983 void clear() { m_RawList.Clear(); }
3984 void push_back(
const T& value) { m_RawList.PushBack(value); }
3985 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3986 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3989 VmaRawList<T> m_RawList;
3992 #endif // #if VMA_USE_STL_LIST 4000 #if VMA_USE_STL_UNORDERED_MAP 4002 #define VmaPair std::pair 4004 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4005 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4007 #else // #if VMA_USE_STL_UNORDERED_MAP 4009 template<
typename T1,
typename T2>
4015 VmaPair() : first(), second() { }
4016 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4022 template<
typename KeyT,
typename ValueT>
4026 typedef VmaPair<KeyT, ValueT> PairType;
4027 typedef PairType* iterator;
4029 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4031 iterator begin() {
return m_Vector.begin(); }
4032 iterator end() {
return m_Vector.end(); }
4034 void insert(
const PairType& pair);
4035 iterator find(
const KeyT& key);
4036 void erase(iterator it);
4039 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4042 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4044 template<
typename FirstT,
typename SecondT>
4045 struct VmaPairFirstLess
4047 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4049 return lhs.first < rhs.first;
4051 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4053 return lhs.first < rhsFirst;
4057 template<
typename KeyT,
typename ValueT>
4058 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4060 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4062 m_Vector.data() + m_Vector.size(),
4064 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4065 VmaVectorInsert(m_Vector, indexToInsert, pair);
4068 template<
typename KeyT,
typename ValueT>
4069 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4071 PairType* it = VmaBinaryFindFirstNotLess(
4073 m_Vector.data() + m_Vector.size(),
4075 VmaPairFirstLess<KeyT, ValueT>());
4076 if((it != m_Vector.end()) && (it->first == key))
4082 return m_Vector.end();
4086 template<
typename KeyT,
typename ValueT>
4087 void VmaMap<KeyT, ValueT>::erase(iterator it)
4089 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4092 #endif // #if VMA_USE_STL_UNORDERED_MAP 4098 class VmaDeviceMemoryBlock;
4100 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4102 struct VmaAllocation_T
4104 VMA_CLASS_NO_COPY(VmaAllocation_T)
4106 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4110 FLAG_USER_DATA_STRING = 0x01,
4114 enum ALLOCATION_TYPE
4116 ALLOCATION_TYPE_NONE,
4117 ALLOCATION_TYPE_BLOCK,
4118 ALLOCATION_TYPE_DEDICATED,
4121 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4124 m_pUserData(VMA_NULL),
4125 m_LastUseFrameIndex(currentFrameIndex),
4126 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4127 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4129 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4131 #if VMA_STATS_STRING_ENABLED 4132 m_CreationFrameIndex = currentFrameIndex;
4133 m_BufferImageUsage = 0;
4139 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4142 VMA_ASSERT(m_pUserData == VMA_NULL);
4145 void InitBlockAllocation(
4147 VmaDeviceMemoryBlock* block,
4148 VkDeviceSize offset,
4149 VkDeviceSize alignment,
4151 VmaSuballocationType suballocationType,
4155 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4156 VMA_ASSERT(block != VMA_NULL);
4157 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4158 m_Alignment = alignment;
4160 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4161 m_SuballocationType = (uint8_t)suballocationType;
4162 m_BlockAllocation.m_hPool = hPool;
4163 m_BlockAllocation.m_Block = block;
4164 m_BlockAllocation.m_Offset = offset;
4165 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4170 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4171 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4172 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4173 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4174 m_BlockAllocation.m_Block = VMA_NULL;
4175 m_BlockAllocation.m_Offset = 0;
4176 m_BlockAllocation.m_CanBecomeLost =
true;
4179 void ChangeBlockAllocation(
4181 VmaDeviceMemoryBlock* block,
4182 VkDeviceSize offset);
4185 void InitDedicatedAllocation(
4186 uint32_t memoryTypeIndex,
4187 VkDeviceMemory hMemory,
4188 VmaSuballocationType suballocationType,
4192 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4193 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4194 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4197 m_SuballocationType = (uint8_t)suballocationType;
4198 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4199 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4200 m_DedicatedAllocation.m_hMemory = hMemory;
4201 m_DedicatedAllocation.m_pMappedData = pMappedData;
4204 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4205 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4206 VkDeviceSize GetSize()
const {
return m_Size; }
4207 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4208 void* GetUserData()
const {
return m_pUserData; }
4209 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4210 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4212 VmaDeviceMemoryBlock* GetBlock()
const 4214 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4215 return m_BlockAllocation.m_Block;
4217 VkDeviceSize GetOffset()
const;
4218 VkDeviceMemory GetMemory()
const;
4219 uint32_t GetMemoryTypeIndex()
const;
4220 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4221 void* GetMappedData()
const;
4222 bool CanBecomeLost()
const;
4225 uint32_t GetLastUseFrameIndex()
const 4227 return m_LastUseFrameIndex.load();
4229 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4231 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4241 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4243 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4245 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4256 void BlockAllocMap();
4257 void BlockAllocUnmap();
4258 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4261 #if VMA_STATS_STRING_ENABLED 4262 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4263 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4265 void InitBufferImageUsage(uint32_t bufferImageUsage)
4267 VMA_ASSERT(m_BufferImageUsage == 0);
4268 m_BufferImageUsage = bufferImageUsage;
4271 void PrintParameters(
class VmaJsonWriter& json)
const;
4275 VkDeviceSize m_Alignment;
4276 VkDeviceSize m_Size;
4278 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4280 uint8_t m_SuballocationType;
4287 struct BlockAllocation
4290 VmaDeviceMemoryBlock* m_Block;
4291 VkDeviceSize m_Offset;
4292 bool m_CanBecomeLost;
4296 struct DedicatedAllocation
4298 uint32_t m_MemoryTypeIndex;
4299 VkDeviceMemory m_hMemory;
4300 void* m_pMappedData;
4306 BlockAllocation m_BlockAllocation;
4308 DedicatedAllocation m_DedicatedAllocation;
4311 #if VMA_STATS_STRING_ENABLED 4312 uint32_t m_CreationFrameIndex;
4313 uint32_t m_BufferImageUsage;
4323 struct VmaSuballocation
4325 VkDeviceSize offset;
4328 VmaSuballocationType type;
4331 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4334 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4349 struct VmaAllocationRequest
4351 VkDeviceSize offset;
4352 VkDeviceSize sumFreeSize;
4353 VkDeviceSize sumItemSize;
4354 VmaSuballocationList::iterator item;
4355 size_t itemsToMakeLostCount;
4357 VkDeviceSize CalcCost()
const 4359 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4367 class VmaBlockMetadata
4369 VMA_CLASS_NO_COPY(VmaBlockMetadata)
4372 ~VmaBlockMetadata();
4373 void Init(VkDeviceSize size);
4376 bool Validate()
const;
4377 VkDeviceSize GetSize()
const {
return m_Size; }
4378 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4379 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4380 VkDeviceSize GetUnusedRangeSizeMax()
const;
4382 bool IsEmpty()
const;
4384 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4387 #if VMA_STATS_STRING_ENABLED 4388 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4394 bool CreateAllocationRequest(
4395 uint32_t currentFrameIndex,
4396 uint32_t frameInUseCount,
4397 VkDeviceSize bufferImageGranularity,
4398 VkDeviceSize allocSize,
4399 VkDeviceSize allocAlignment,
4400 VmaSuballocationType allocType,
4401 bool canMakeOtherLost,
4402 VmaAllocationRequest* pAllocationRequest);
4404 bool MakeRequestedAllocationsLost(
4405 uint32_t currentFrameIndex,
4406 uint32_t frameInUseCount,
4407 VmaAllocationRequest* pAllocationRequest);
4409 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4411 VkResult CheckCorruption(
const void* pBlockData);
4415 const VmaAllocationRequest& request,
4416 VmaSuballocationType type,
4417 VkDeviceSize allocSize,
4422 void FreeAtOffset(VkDeviceSize offset);
4425 VkDeviceSize m_Size;
4426 uint32_t m_FreeCount;
4427 VkDeviceSize m_SumFreeSize;
4428 VmaSuballocationList m_Suballocations;
4431 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4433 bool ValidateFreeSuballocationList()
const;
4437 bool CheckAllocation(
4438 uint32_t currentFrameIndex,
4439 uint32_t frameInUseCount,
4440 VkDeviceSize bufferImageGranularity,
4441 VkDeviceSize allocSize,
4442 VkDeviceSize allocAlignment,
4443 VmaSuballocationType allocType,
4444 VmaSuballocationList::const_iterator suballocItem,
4445 bool canMakeOtherLost,
4446 VkDeviceSize* pOffset,
4447 size_t* itemsToMakeLostCount,
4448 VkDeviceSize* pSumFreeSize,
4449 VkDeviceSize* pSumItemSize)
const;
4451 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4455 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4458 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4461 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4470 class VmaDeviceMemoryBlock
4472 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4474 VmaBlockMetadata m_Metadata;
4478 ~VmaDeviceMemoryBlock()
4480 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4481 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4486 uint32_t newMemoryTypeIndex,
4487 VkDeviceMemory newMemory,
4488 VkDeviceSize newSize,
4493 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4494 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4495 uint32_t GetId()
const {
return m_Id; }
4496 void* GetMappedData()
const {
return m_pMappedData; }
4499 bool Validate()
const;
4504 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4507 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4508 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4510 VkResult BindBufferMemory(
4514 VkResult BindImageMemory(
4520 uint32_t m_MemoryTypeIndex;
4522 VkDeviceMemory m_hMemory;
4527 uint32_t m_MapCount;
4528 void* m_pMappedData;
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
4547 struct VmaBlockVector
4549 VMA_CLASS_NO_COPY(VmaBlockVector)
4553 uint32_t memoryTypeIndex,
4554 VkDeviceSize preferredBlockSize,
4555 size_t minBlockCount,
4556 size_t maxBlockCount,
4557 VkDeviceSize bufferImageGranularity,
4558 uint32_t frameInUseCount,
4562 VkResult CreateMinBlocks();
4564 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4565 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4566 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4567 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4571 bool IsEmpty()
const {
return m_Blocks.empty(); }
4572 bool IsCorruptionDetectionEnabled()
const;
4576 uint32_t currentFrameIndex,
4578 VkDeviceSize alignment,
4580 VmaSuballocationType suballocType,
4589 #if VMA_STATS_STRING_ENABLED 4590 void PrintDetailedMap(
class VmaJsonWriter& json);
4593 void MakePoolAllocationsLost(
4594 uint32_t currentFrameIndex,
4595 size_t* pLostAllocationCount);
4596 VkResult CheckCorruption();
4598 VmaDefragmentator* EnsureDefragmentator(
4600 uint32_t currentFrameIndex);
4602 VkResult Defragment(
4604 VkDeviceSize& maxBytesToMove,
4605 uint32_t& maxAllocationsToMove);
4607 void DestroyDefragmentator();
4610 friend class VmaDefragmentator;
4613 const uint32_t m_MemoryTypeIndex;
4614 const VkDeviceSize m_PreferredBlockSize;
4615 const size_t m_MinBlockCount;
4616 const size_t m_MaxBlockCount;
4617 const VkDeviceSize m_BufferImageGranularity;
4618 const uint32_t m_FrameInUseCount;
4619 const bool m_IsCustomPool;
4622 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
4626 bool m_HasEmptyBlock;
4627 VmaDefragmentator* m_pDefragmentator;
4628 uint32_t m_NextBlockId;
4630 VkDeviceSize CalcMaxBlockSize()
const;
4633 void Remove(VmaDeviceMemoryBlock* pBlock);
4637 void IncrementallySortBlocks();
4639 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
4644 VMA_CLASS_NO_COPY(VmaPool_T)
4646 VmaBlockVector m_BlockVector;
4653 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
4654 uint32_t GetId()
const {
return m_Id; }
4655 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
4657 #if VMA_STATS_STRING_ENABLED 4665 class VmaDefragmentator
4667 VMA_CLASS_NO_COPY(VmaDefragmentator)
4670 VmaBlockVector*
const m_pBlockVector;
4671 uint32_t m_CurrentFrameIndex;
4672 VkDeviceSize m_BytesMoved;
4673 uint32_t m_AllocationsMoved;
4675 struct AllocationInfo
4678 VkBool32* m_pChanged;
4681 m_hAllocation(VK_NULL_HANDLE),
4682 m_pChanged(VMA_NULL)
4687 struct AllocationInfoSizeGreater
4689 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4691 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4696 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4700 VmaDeviceMemoryBlock* m_pBlock;
4701 bool m_HasNonMovableAllocations;
4702 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4704 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4706 m_HasNonMovableAllocations(true),
4707 m_Allocations(pAllocationCallbacks),
4708 m_pMappedDataForDefragmentation(VMA_NULL)
4712 void CalcHasNonMovableAllocations()
4714 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4715 const size_t defragmentAllocCount = m_Allocations.size();
4716 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4719 void SortAllocationsBySizeDescecnding()
4721 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4724 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4729 void* m_pMappedDataForDefragmentation;
4732 struct BlockPointerLess
4734 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4736 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4738 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4740 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4746 struct BlockInfoCompareMoveDestination
4748 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4750 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4754 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4758 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4766 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4767 BlockInfoVector m_Blocks;
4769 VkResult DefragmentRound(
4770 VkDeviceSize maxBytesToMove,
4771 uint32_t maxAllocationsToMove);
4773 static bool MoveMakesSense(
4774 size_t dstBlockIndex, VkDeviceSize dstOffset,
4775 size_t srcBlockIndex, VkDeviceSize srcOffset);
4780 VmaBlockVector* pBlockVector,
4781 uint32_t currentFrameIndex);
4783 ~VmaDefragmentator();
4785 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4786 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4788 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4790 VkResult Defragment(
4791 VkDeviceSize maxBytesToMove,
4792 uint32_t maxAllocationsToMove);
4795 #if VMA_RECORDING_ENABLED 4804 void RecordCreateAllocator(uint32_t frameIndex);
4805 void RecordDestroyAllocator(uint32_t frameIndex);
4806 void RecordCreatePool(uint32_t frameIndex,
4809 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
4810 void RecordAllocateMemory(uint32_t frameIndex,
4811 const VkMemoryRequirements& vkMemReq,
4814 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
4815 const VkMemoryRequirements& vkMemReq,
4816 bool requiresDedicatedAllocation,
4817 bool prefersDedicatedAllocation,
4820 void RecordAllocateMemoryForImage(uint32_t frameIndex,
4821 const VkMemoryRequirements& vkMemReq,
4822 bool requiresDedicatedAllocation,
4823 bool prefersDedicatedAllocation,
4826 void RecordFreeMemory(uint32_t frameIndex,
4828 void RecordSetAllocationUserData(uint32_t frameIndex,
4830 const void* pUserData);
4831 void RecordCreateLostAllocation(uint32_t frameIndex,
4833 void RecordMapMemory(uint32_t frameIndex,
4835 void RecordUnmapMemory(uint32_t frameIndex,
4837 void RecordFlushAllocation(uint32_t frameIndex,
4838 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
4839 void RecordInvalidateAllocation(uint32_t frameIndex,
4840 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
4841 void RecordCreateBuffer(uint32_t frameIndex,
4842 const VkBufferCreateInfo& bufCreateInfo,
4845 void RecordCreateImage(uint32_t frameIndex,
4846 const VkImageCreateInfo& imageCreateInfo,
4849 void RecordDestroyBuffer(uint32_t frameIndex,
4851 void RecordDestroyImage(uint32_t frameIndex,
4853 void RecordTouchAllocation(uint32_t frameIndex,
4855 void RecordGetAllocationInfo(uint32_t frameIndex,
4865 class UserDataString
4869 const char* GetString()
const {
return m_Str; }
4879 VMA_MUTEX m_FileMutex;
4881 int64_t m_StartCounter;
4883 void GetBasicParams(CallParams& outParams);
4887 #endif // #if VMA_RECORDING_ENABLED 4890 struct VmaAllocator_T
4892 VMA_CLASS_NO_COPY(VmaAllocator_T)
4895 bool m_UseKhrDedicatedAllocation;
4897 bool m_AllocationCallbacksSpecified;
4898 VkAllocationCallbacks m_AllocationCallbacks;
4902 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4903 VMA_MUTEX m_HeapSizeLimitMutex;
4905 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4906 VkPhysicalDeviceMemoryProperties m_MemProps;
4909 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4912 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4913 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4914 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4920 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4922 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4926 return m_VulkanFunctions;
4929 VkDeviceSize GetBufferImageGranularity()
const 4932 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4933 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4936 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4937 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4939 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4941 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4942 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4945 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 4947 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
4948 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
4951 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 4953 return IsMemoryTypeNonCoherent(memTypeIndex) ?
4954 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
4955 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
4958 bool IsIntegratedGpu()
const 4960 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
4963 #if VMA_RECORDING_ENABLED 4964 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
4967 void GetBufferMemoryRequirements(
4969 VkMemoryRequirements& memReq,
4970 bool& requiresDedicatedAllocation,
4971 bool& prefersDedicatedAllocation)
const;
4972 void GetImageMemoryRequirements(
4974 VkMemoryRequirements& memReq,
4975 bool& requiresDedicatedAllocation,
4976 bool& prefersDedicatedAllocation)
const;
4979 VkResult AllocateMemory(
4980 const VkMemoryRequirements& vkMemReq,
4981 bool requiresDedicatedAllocation,
4982 bool prefersDedicatedAllocation,
4983 VkBuffer dedicatedBuffer,
4984 VkImage dedicatedImage,
4986 VmaSuballocationType suballocType,
4992 void CalculateStats(
VmaStats* pStats);
4994 #if VMA_STATS_STRING_ENABLED 4995 void PrintDetailedMap(
class VmaJsonWriter& json);
4998 VkResult Defragment(
5000 size_t allocationCount,
5001 VkBool32* pAllocationsChanged,
5009 void DestroyPool(
VmaPool pool);
5012 void SetCurrentFrameIndex(uint32_t frameIndex);
5013 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5015 void MakePoolAllocationsLost(
5017 size_t* pLostAllocationCount);
5018 VkResult CheckPoolCorruption(
VmaPool hPool);
5019 VkResult CheckCorruption(uint32_t memoryTypeBits);
5023 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5024 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5029 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5030 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5032 void FlushOrInvalidateAllocation(
5034 VkDeviceSize offset, VkDeviceSize size,
5035 VMA_CACHE_OPERATION op);
5037 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5040 VkDeviceSize m_PreferredLargeHeapBlockSize;
5042 VkPhysicalDevice m_PhysicalDevice;
5043 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5045 VMA_MUTEX m_PoolsMutex;
5047 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5048 uint32_t m_NextPoolId;
5052 #if VMA_RECORDING_ENABLED 5053 VmaRecorder* m_pRecorder;
5058 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5060 VkResult AllocateMemoryOfType(
5062 VkDeviceSize alignment,
5063 bool dedicatedAllocation,
5064 VkBuffer dedicatedBuffer,
5065 VkImage dedicatedImage,
5067 uint32_t memTypeIndex,
5068 VmaSuballocationType suballocType,
5072 VkResult AllocateDedicatedMemory(
5074 VmaSuballocationType suballocType,
5075 uint32_t memTypeIndex,
5077 bool isUserDataString,
5079 VkBuffer dedicatedBuffer,
5080 VkImage dedicatedImage,
5090 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5092 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5095 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5097 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5100 template<
typename T>
5103 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5106 template<
typename T>
5107 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5109 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5112 template<
typename T>
5113 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5118 VmaFree(hAllocator, ptr);
5122 template<
typename T>
5123 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5127 for(
size_t i = count; i--; )
5129 VmaFree(hAllocator, ptr);
5136 #if VMA_STATS_STRING_ENABLED 5138 class VmaStringBuilder
5141 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5142 size_t GetLength()
const {
return m_Data.size(); }
5143 const char* GetData()
const {
return m_Data.data(); }
5145 void Add(
char ch) { m_Data.push_back(ch); }
5146 void Add(
const char* pStr);
5147 void AddNewLine() { Add(
'\n'); }
5148 void AddNumber(uint32_t num);
5149 void AddNumber(uint64_t num);
5150 void AddPointer(
const void* ptr);
5153 VmaVector< char, VmaStlAllocator<char> > m_Data;
5156 void VmaStringBuilder::Add(
const char* pStr)
5158 const size_t strLen = strlen(pStr);
5161 const size_t oldCount = m_Data.size();
5162 m_Data.resize(oldCount + strLen);
5163 memcpy(m_Data.data() + oldCount, pStr, strLen);
5167 void VmaStringBuilder::AddNumber(uint32_t num)
5170 VmaUint32ToStr(buf,
sizeof(buf), num);
5174 void VmaStringBuilder::AddNumber(uint64_t num)
5177 VmaUint64ToStr(buf,
sizeof(buf), num);
5181 void VmaStringBuilder::AddPointer(
const void* ptr)
5184 VmaPtrToStr(buf,
sizeof(buf), ptr);
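/*
Usage sketch (illustrative, not from the original source): VmaStringBuilder is the
append-only character buffer behind the JSON statistics string; numbers and pointers are
formatted through the Vma*ToStr helpers defined earlier.

    // Assuming 'allocator' is a valid VmaAllocator handle:
    //   VmaStringBuilder sb(allocator);
    //   sb.Add("Block size: ");
    //   sb.AddNumber((uint64_t)(256ull * 1024 * 1024));
    //   sb.AddNewLine();
    //   // GetData() is not null-terminated; always pair it with GetLength().
*/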
5188 #endif // #if VMA_STATS_STRING_ENABLED 5193 #if VMA_STATS_STRING_ENABLED 5197 VMA_CLASS_NO_COPY(VmaJsonWriter)
5199 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5202 void BeginObject(
bool singleLine =
false);
5205 void BeginArray(
bool singleLine =
false);
5208 void WriteString(
const char* pStr);
5209 void BeginString(
const char* pStr = VMA_NULL);
5210 void ContinueString(
const char* pStr);
5211 void ContinueString(uint32_t n);
5212 void ContinueString(uint64_t n);
5213 void ContinueString_Pointer(
const void* ptr);
5214 void EndString(
const char* pStr = VMA_NULL);
5216 void WriteNumber(uint32_t n);
5217 void WriteNumber(uint64_t n);
5218 void WriteBool(
bool b);
5222 static const char*
const INDENT;
5224 enum COLLECTION_TYPE
5226 COLLECTION_TYPE_OBJECT,
5227 COLLECTION_TYPE_ARRAY,
5231 COLLECTION_TYPE type;
5232 uint32_t valueCount;
5233 bool singleLineMode;
5236 VmaStringBuilder& m_SB;
5237 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5238 bool m_InsideString;
5240 void BeginValue(
bool isString);
5241 void WriteIndent(
bool oneLess =
false);
5244 const char*
const VmaJsonWriter::INDENT =
" ";
5246 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5248 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5249 m_InsideString(false)
5253 VmaJsonWriter::~VmaJsonWriter()
5255 VMA_ASSERT(!m_InsideString);
5256 VMA_ASSERT(m_Stack.empty());
5259 void VmaJsonWriter::BeginObject(
bool singleLine)
5261 VMA_ASSERT(!m_InsideString);
5267 item.type = COLLECTION_TYPE_OBJECT;
5268 item.valueCount = 0;
5269 item.singleLineMode = singleLine;
5270 m_Stack.push_back(item);
5273 void VmaJsonWriter::EndObject()
5275 VMA_ASSERT(!m_InsideString);
5280 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
5284 void VmaJsonWriter::BeginArray(
bool singleLine)
5286 VMA_ASSERT(!m_InsideString);
5292 item.type = COLLECTION_TYPE_ARRAY;
5293 item.valueCount = 0;
5294 item.singleLineMode = singleLine;
5295 m_Stack.push_back(item);
5298 void VmaJsonWriter::EndArray()
5300 VMA_ASSERT(!m_InsideString);
5305 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        // Escaping of quotes, backslashes and control characters is handled per
        // character; characters that cannot be represented fall through to:
        VMA_ASSERT(0 && "Character not currently supported.");
    }
}
void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}
void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}
void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT && currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }
        if(currItem.type == COLLECTION_TYPE_OBJECT && currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();
        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
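// A usage sketch of the JSON writer defined above. The exact VmaStringBuilder
// constructor arguments are not shown here, so the builder is assumed to be
// constructed the same way vmaBuildStatsString does it; the output shape is
// approximate, not the exact stats-string format.
#if 0
// VmaStringBuilder sb(/* allocator, as in vmaBuildStatsString */);
VmaJsonWriter json(VMA_NULL, sb); // allocation callbacks may be null here
json.BeginObject();
json.WriteString("Blocks");   // inside an object, keys and values alternate
json.WriteNumber(3u);
json.WriteString("Sizes");
json.BeginArray(true);        // singleLine = true: no newline/indent between elements
json.WriteNumber(256u);
json.WriteNumber(1024u);
json.EndArray();
json.EndObject();
// sb.GetData()/GetLength() now hold roughly: {"Blocks": 3, "Sizes": [256, 1024]}
// with newlines and INDENT inserted for the non-single-line object.
#endif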
#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}
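// The same copy-on-set pattern as above, sketched with plain new[]/delete[]
// instead of vma_new_array/vma_delete_array (illustrative only; the helper name
// and signature are hypothetical):
#if 0
#include <cstring>

void SetStringUserData(char*& storedStr, const void* pUserData)
{
    delete[] storedStr;          // free the previous copy, if any
    storedStr = nullptr;
    if(pUserData != nullptr)
    {
        const char* src = static_cast<const char*>(pUserData);
        const size_t len = strlen(src);
        storedStr = new char[len + 1];
        memcpy(storedStr, src, len + 1); // copy including the terminating null
    }
}
#endif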
void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move the mapping reference count from the old block to the new one.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}
5525 VkDeviceSize VmaAllocation_T::GetOffset()
const 5529 case ALLOCATION_TYPE_BLOCK:
5530 return m_BlockAllocation.m_Offset;
5531 case ALLOCATION_TYPE_DEDICATED:
5539 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5543 case ALLOCATION_TYPE_BLOCK:
5544 return m_BlockAllocation.m_Block->GetDeviceMemory();
5545 case ALLOCATION_TYPE_DEDICATED:
5546 return m_DedicatedAllocation.m_hMemory;
5549 return VK_NULL_HANDLE;
5553 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5557 case ALLOCATION_TYPE_BLOCK:
5558 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5559 case ALLOCATION_TYPE_DEDICATED:
5560 return m_DedicatedAllocation.m_MemoryTypeIndex;
5567 void* VmaAllocation_T::GetMappedData()
const 5571 case ALLOCATION_TYPE_BLOCK:
5574 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5575 VMA_ASSERT(pBlockData != VMA_NULL);
5576 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5583 case ALLOCATION_TYPE_DEDICATED:
5584 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5585 return m_DedicatedAllocation.m_pMappedData;
5592 bool VmaAllocation_T::CanBecomeLost()
const 5596 case ALLOCATION_TYPE_BLOCK:
5597 return m_BlockAllocation.m_CanBecomeLost;
5598 case ALLOCATION_TYPE_DEDICATED:
VmaPool VmaAllocation_T::GetPool() const
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    return m_BlockAllocation.m_hPool;
}
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false; // Still in use in one of the last frameInUseCount frames.
        }
        else if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
        {
            return true; // Successfully marked as lost.
        }
    }
}
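// A sketch of the compare-exchange retry loop this function relies on, written
// against std::atomic directly. The real code goes through GetLastUseFrameIndex
// and CompareExchangeLastUseFrameIndex; the constant and helper below are
// illustrative assumptions.
#if 0
#include <atomic>
#include <cstdint>

const uint32_t FRAME_INDEX_LOST = UINT32_MAX;

bool TryMakeLost(std::atomic<uint32_t>& lastUseFrameIndex,
                 uint32_t currentFrameIndex,
                 uint32_t frameInUseCount)
{
    uint32_t observed = lastUseFrameIndex.load();
    for(;;)
    {
        if(observed == FRAME_INDEX_LOST)
        {
            return false; // already lost
        }
        if(observed + frameInUseCount >= currentFrameIndex)
        {
            return false; // still potentially in use by the GPU
        }
        // compare_exchange_weak reloads 'observed' on failure, so we retry.
        if(lastUseFrameIndex.compare_exchange_weak(observed, FRAME_INDEX_LOST))
        {
            return true;  // successfully marked as lost
        }
    }
}
#endif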
#if VMA_STATS_STRING_ENABLED

static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}
5706 void VmaAllocation_T::BlockAllocMap()
5708 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5710 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5716 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
5720 void VmaAllocation_T::BlockAllocUnmap()
5722 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5724 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5730 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
5734 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5736 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5740 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5742 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5743 *ppData = m_DedicatedAllocation.m_pMappedData;
5749 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5750 return VK_ERROR_MEMORY_MAP_FAILED;
5755 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5756 hAllocator->m_hDevice,
5757 m_DedicatedAllocation.m_hMemory,
5762 if(result == VK_SUCCESS)
5764 m_DedicatedAllocation.m_pMappedData = *ppData;
5771 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5773 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5775 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5780 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5781 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5782 hAllocator->m_hDevice,
5783 m_DedicatedAllocation.m_hMemory);
5788 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
5798 json.WriteString(
"Blocks");
5801 json.WriteString(
"Allocations");
5804 json.WriteString(
"UnusedRanges");
5807 json.WriteString(
"UsedBytes");
5810 json.WriteString(
"UnusedBytes");
5815 json.WriteString(
"AllocationSize");
5816 json.BeginObject(
true);
5817 json.WriteString(
"Min");
5819 json.WriteString(
"Avg");
5821 json.WriteString(
"Max");
5828 json.WriteString(
"UnusedRangeSize");
5829 json.BeginObject(
true);
5830 json.WriteString(
"Min");
5832 json.WriteString(
"Avg");
5834 json.WriteString(
"Max");
#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
{
    bool operator()(const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
5863 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5867 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5868 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5872 VmaBlockMetadata::~VmaBlockMetadata()
5876 void VmaBlockMetadata::Init(VkDeviceSize size)
5880 m_SumFreeSize = size;
5882 VmaSuballocation suballoc = {};
5883 suballoc.offset = 0;
5884 suballoc.size = size;
5885 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5886 suballoc.hAllocation = VK_NULL_HANDLE;
5888 m_Suballocations.push_back(suballoc);
5889 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5891 m_FreeSuballocationsBySize.push_back(suballocItem);
5894 bool VmaBlockMetadata::Validate()
const 5896 if(m_Suballocations.empty())
5902 VkDeviceSize calculatedOffset = 0;
5904 uint32_t calculatedFreeCount = 0;
5906 VkDeviceSize calculatedSumFreeSize = 0;
5909 size_t freeSuballocationsToRegister = 0;
5911 bool prevFree =
false;
5913 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5914 suballocItem != m_Suballocations.cend();
5917 const VmaSuballocation& subAlloc = *suballocItem;
5920 if(subAlloc.offset != calculatedOffset)
5925 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5927 if(prevFree && currFree)
5932 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5939 calculatedSumFreeSize += subAlloc.size;
5940 ++calculatedFreeCount;
5941 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5943 ++freeSuballocationsToRegister;
5947 if(subAlloc.size < VMA_DEBUG_MARGIN)
5954 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5958 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5964 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
5970 calculatedOffset += subAlloc.size;
5971 prevFree = currFree;
5976 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5981 VkDeviceSize lastSize = 0;
5982 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5984 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5987 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5992 if(suballocItem->size < lastSize)
5997 lastSize = suballocItem->size;
6001 if(!ValidateFreeSuballocationList() ||
6002 (calculatedOffset != m_Size) ||
6003 (calculatedSumFreeSize != m_SumFreeSize) ||
6004 (calculatedFreeCount != m_FreeCount))
6012 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 6014 if(!m_FreeSuballocationsBySize.empty())
6016 return m_FreeSuballocationsBySize.back()->size;
6024 bool VmaBlockMetadata::IsEmpty()
const 6026 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
6029 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6033 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6045 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6046 suballocItem != m_Suballocations.cend();
6049 const VmaSuballocation& suballoc = *suballocItem;
6050 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
void VmaBlockMetadata::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += m_Size;
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap(class VmaJsonWriter& json) const
{
    json.WriteString(
"TotalBytes");
6081 json.WriteNumber(m_Size);
6083 json.WriteString(
"UnusedBytes");
6084 json.WriteNumber(m_SumFreeSize);
6086 json.WriteString(
"Allocations");
6087 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
6089 json.WriteString(
"UnusedRanges");
6090 json.WriteNumber(m_FreeCount);
6092 json.WriteString(
"Suballocations");
6095 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6096 suballocItem != m_Suballocations.cend();
6097 ++suballocItem, ++i)
6099 json.BeginObject(
true);
6101 json.WriteString(
"Offset");
6102 json.WriteNumber(suballocItem->offset);
6104 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6106 json.WriteString(
"Type");
6107 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6109 json.WriteString(
"Size");
6110 json.WriteNumber(suballocItem->size);
6114 suballocItem->hAllocation->PrintParameters(json);
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata::CreateAllocationRequest(
6137 uint32_t currentFrameIndex,
6138 uint32_t frameInUseCount,
6139 VkDeviceSize bufferImageGranularity,
6140 VkDeviceSize allocSize,
6141 VkDeviceSize allocAlignment,
6142 VmaSuballocationType allocType,
6143 bool canMakeOtherLost,
6144 VmaAllocationRequest* pAllocationRequest)
6146 VMA_ASSERT(allocSize > 0);
6147 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6148 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6149 VMA_HEAVY_ASSERT(Validate());
6152 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6158 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6159 if(freeSuballocCount > 0)
6164 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6165 m_FreeSuballocationsBySize.data(),
6166 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6167 allocSize + 2 * VMA_DEBUG_MARGIN,
6168 VmaSuballocationItemSizeLess());
6169 size_t index = it - m_FreeSuballocationsBySize.data();
6170 for(; index < freeSuballocCount; ++index)
6175 bufferImageGranularity,
6179 m_FreeSuballocationsBySize[index],
6181 &pAllocationRequest->offset,
6182 &pAllocationRequest->itemsToMakeLostCount,
6183 &pAllocationRequest->sumFreeSize,
6184 &pAllocationRequest->sumItemSize))
6186 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
6194 for(
size_t index = freeSuballocCount; index--; )
6199 bufferImageGranularity,
6203 m_FreeSuballocationsBySize[index],
6205 &pAllocationRequest->offset,
6206 &pAllocationRequest->itemsToMakeLostCount,
6207 &pAllocationRequest->sumFreeSize,
6208 &pAllocationRequest->sumItemSize))
6210 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
6217 if(canMakeOtherLost)
6221 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6222 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6224 VmaAllocationRequest tmpAllocRequest = {};
6225 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6226 suballocIt != m_Suballocations.end();
6229 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6230 suballocIt->hAllocation->CanBecomeLost())
6235 bufferImageGranularity,
6241 &tmpAllocRequest.offset,
6242 &tmpAllocRequest.itemsToMakeLostCount,
6243 &tmpAllocRequest.sumFreeSize,
6244 &tmpAllocRequest.sumItemSize))
6246 tmpAllocRequest.item = suballocIt;
6248 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
6250 *pAllocationRequest = tmpAllocRequest;
6256 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
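// A sketch of the best-fit lookup used above: keep free ranges sorted by size
// and binary-search for the first one that can hold the request. std::lower_bound
// plays the role of VmaBinaryFindFirstNotLess here; the FreeRange type and the
// function name are illustrative only.
#if 0
#include <algorithm>
#include <cstdint>
#include <vector>

struct FreeRange { uint64_t offset; uint64_t size; };

// Returns the index of the smallest free range with size >= requiredSize, or SIZE_MAX.
size_t FindBestFit(const std::vector<FreeRange>& freeBySize, uint64_t requiredSize)
{
    auto it = std::lower_bound(
        freeBySize.begin(), freeBySize.end(), requiredSize,
        [](const FreeRange& range, uint64_t size) { return range.size < size; });
    return (it != freeBySize.end()) ? size_t(it - freeBySize.begin()) : SIZE_MAX;
}
#endif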
6265 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
6266 uint32_t currentFrameIndex,
6267 uint32_t frameInUseCount,
6268 VmaAllocationRequest* pAllocationRequest)
6270 while(pAllocationRequest->itemsToMakeLostCount > 0)
6272 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6274 ++pAllocationRequest->item;
6276 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6277 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6278 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6279 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6281 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6282 --pAllocationRequest->itemsToMakeLostCount;
6290 VMA_HEAVY_ASSERT(Validate());
6291 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6292 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
6297 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6299 uint32_t lostAllocationCount = 0;
6300 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6301 it != m_Suballocations.end();
6304 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6305 it->hAllocation->CanBecomeLost() &&
6306 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6308 it = FreeSuballocation(it);
6309 ++lostAllocationCount;
6312 return lostAllocationCount;
6315 VkResult VmaBlockMetadata::CheckCorruption(
const void* pBlockData)
6317 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6318 it != m_Suballocations.end();
6321 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6323 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6325 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6326 return VK_ERROR_VALIDATION_FAILED_EXT;
6328 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6330 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6331 return VK_ERROR_VALIDATION_FAILED_EXT;
6339 void VmaBlockMetadata::Alloc(
6340 const VmaAllocationRequest& request,
6341 VmaSuballocationType type,
6342 VkDeviceSize allocSize,
6345 VMA_ASSERT(request.item != m_Suballocations.end());
6346 VmaSuballocation& suballoc = *request.item;
6348 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6350 VMA_ASSERT(request.offset >= suballoc.offset);
6351 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
6352 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
6353 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
6357 UnregisterFreeSuballocation(request.item);
6359 suballoc.offset = request.offset;
6360 suballoc.size = allocSize;
6361 suballoc.type = type;
6362 suballoc.hAllocation = hAllocation;
6367 VmaSuballocation paddingSuballoc = {};
6368 paddingSuballoc.offset = request.offset + allocSize;
6369 paddingSuballoc.size = paddingEnd;
6370 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6371 VmaSuballocationList::iterator next = request.item;
6373 const VmaSuballocationList::iterator paddingEndItem =
6374 m_Suballocations.insert(next, paddingSuballoc);
6375 RegisterFreeSuballocation(paddingEndItem);
6381 VmaSuballocation paddingSuballoc = {};
6382 paddingSuballoc.offset = request.offset - paddingBegin;
6383 paddingSuballoc.size = paddingBegin;
6384 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6385 const VmaSuballocationList::iterator paddingBeginItem =
6386 m_Suballocations.insert(request.item, paddingSuballoc);
6387 RegisterFreeSuballocation(paddingBeginItem);
6391 m_FreeCount = m_FreeCount - 1;
6392 if(paddingBegin > 0)
6400 m_SumFreeSize -= allocSize;
6405 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6406 suballocItem != m_Suballocations.end();
6409 VmaSuballocation& suballoc = *suballocItem;
6410 if(suballoc.hAllocation == allocation)
6412 FreeSuballocation(suballocItem);
6413 VMA_HEAVY_ASSERT(Validate());
6417 VMA_ASSERT(0 &&
"Not found!");
6420 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
6422 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6423 suballocItem != m_Suballocations.end();
6426 VmaSuballocation& suballoc = *suballocItem;
6427 if(suballoc.offset == offset)
6429 FreeSuballocation(suballocItem);
6433 VMA_ASSERT(0 &&
"Not found!");
6436 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 6438 VkDeviceSize lastSize = 0;
6439 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
6441 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
6443 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6448 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6453 if(it->size < lastSize)
6459 lastSize = it->size;
6464 bool VmaBlockMetadata::CheckAllocation(
6465 uint32_t currentFrameIndex,
6466 uint32_t frameInUseCount,
6467 VkDeviceSize bufferImageGranularity,
6468 VkDeviceSize allocSize,
6469 VkDeviceSize allocAlignment,
6470 VmaSuballocationType allocType,
6471 VmaSuballocationList::const_iterator suballocItem,
6472 bool canMakeOtherLost,
6473 VkDeviceSize* pOffset,
6474 size_t* itemsToMakeLostCount,
6475 VkDeviceSize* pSumFreeSize,
6476 VkDeviceSize* pSumItemSize)
const 6478 VMA_ASSERT(allocSize > 0);
6479 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6480 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6481 VMA_ASSERT(pOffset != VMA_NULL);
6483 *itemsToMakeLostCount = 0;
6487 if(canMakeOtherLost)
6489 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6491 *pSumFreeSize = suballocItem->size;
6495 if(suballocItem->hAllocation->CanBecomeLost() &&
6496 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6498 ++*itemsToMakeLostCount;
6499 *pSumItemSize = suballocItem->size;
6508 if(m_Size - suballocItem->offset < allocSize)
6514 *pOffset = suballocItem->offset;
6517 if(VMA_DEBUG_MARGIN > 0)
6519 *pOffset += VMA_DEBUG_MARGIN;
6523 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
6527 if(bufferImageGranularity > 1)
6529 bool bufferImageGranularityConflict =
false;
6530 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6531 while(prevSuballocItem != m_Suballocations.cbegin())
6534 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6535 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6537 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6539 bufferImageGranularityConflict =
true;
6547 if(bufferImageGranularityConflict)
6549 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6555 if(*pOffset >= suballocItem->offset + suballocItem->size)
6561 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6564 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6566 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6568 if(suballocItem->offset + totalSize > m_Size)
6575 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
6576 if(totalSize > suballocItem->size)
6578 VkDeviceSize remainingSize = totalSize - suballocItem->size;
6579 while(remainingSize > 0)
6582 if(lastSuballocItem == m_Suballocations.cend())
6586 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6588 *pSumFreeSize += lastSuballocItem->size;
6592 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
6593 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
6594 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6596 ++*itemsToMakeLostCount;
6597 *pSumItemSize += lastSuballocItem->size;
6604 remainingSize = (lastSuballocItem->size < remainingSize) ?
6605 remainingSize - lastSuballocItem->size : 0;
6611 if(bufferImageGranularity > 1)
6613 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6615 while(nextSuballocItem != m_Suballocations.cend())
6617 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6618 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6620 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6622 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6623 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6624 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6626 ++*itemsToMakeLostCount;
6645 const VmaSuballocation& suballoc = *suballocItem;
6646 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6648 *pSumFreeSize = suballoc.size;
6651 if(suballoc.size < allocSize)
6657 *pOffset = suballoc.offset;
6660 if(VMA_DEBUG_MARGIN > 0)
6662 *pOffset += VMA_DEBUG_MARGIN;
6666 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
6670 if(bufferImageGranularity > 1)
6672 bool bufferImageGranularityConflict =
false;
6673 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6674 while(prevSuballocItem != m_Suballocations.cbegin())
6677 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6678 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6680 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6682 bufferImageGranularityConflict =
true;
6690 if(bufferImageGranularityConflict)
6692 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6697 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6700 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6703 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
6710 if(bufferImageGranularity > 1)
6712 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6714 while(nextSuballocItem != m_Suballocations.cend())
6716 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6717 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6719 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6738 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6740 VMA_ASSERT(item != m_Suballocations.end());
6741 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6743 VmaSuballocationList::iterator nextItem = item;
6745 VMA_ASSERT(nextItem != m_Suballocations.end());
6746 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6748 item->size += nextItem->size;
6750 m_Suballocations.erase(nextItem);
6753 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6756 VmaSuballocation& suballoc = *suballocItem;
6757 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6758 suballoc.hAllocation = VK_NULL_HANDLE;
6762 m_SumFreeSize += suballoc.size;
6765 bool mergeWithNext =
false;
6766 bool mergeWithPrev =
false;
6768 VmaSuballocationList::iterator nextItem = suballocItem;
6770 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6772 mergeWithNext =
true;
6775 VmaSuballocationList::iterator prevItem = suballocItem;
6776 if(suballocItem != m_Suballocations.begin())
6779 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6781 mergeWithPrev =
true;
6787 UnregisterFreeSuballocation(nextItem);
6788 MergeFreeWithNext(suballocItem);
6793 UnregisterFreeSuballocation(prevItem);
6794 MergeFreeWithNext(prevItem);
6795 RegisterFreeSuballocation(prevItem);
6800 RegisterFreeSuballocation(suballocItem);
6801 return suballocItem;
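// The merge-with-neighbours idea used by FreeSuballocation, sketched on a plain
// std::list of ranges (the Range type and helper name are illustrative only):
#if 0
#include <cstdint>
#include <iterator>
#include <list>

struct Range { uint64_t offset; uint64_t size; bool free; };

// Marks *it free and coalesces it with adjacent free ranges; returns the merged range.
std::list<Range>::iterator FreeAndMerge(std::list<Range>& ranges,
                                        std::list<Range>::iterator it)
{
    it->free = true;

    // Merge with the next range if it is free.
    auto next = std::next(it);
    if(next != ranges.end() && next->free)
    {
        it->size += next->size;
        ranges.erase(next);
    }
    // Merge with the previous range if it is free.
    if(it != ranges.begin())
    {
        auto prev = std::prev(it);
        if(prev->free)
        {
            prev->size += it->size;
            ranges.erase(it);
            return prev;
        }
    }
    return it;
}
#endif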
6805 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6807 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6808 VMA_ASSERT(item->size > 0);
6812 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6814 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6816 if(m_FreeSuballocationsBySize.empty())
6818 m_FreeSuballocationsBySize.push_back(item);
6822 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
6830 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6832 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6833 VMA_ASSERT(item->size > 0);
6837 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6839 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6841 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6842 m_FreeSuballocationsBySize.data(),
6843 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6845 VmaSuballocationItemSizeLess());
6846 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6847 index < m_FreeSuballocationsBySize.size();
6850 if(m_FreeSuballocationsBySize[index] == item)
6852 VmaVectorRemove(m_FreeSuballocationsBySize, index);
6855 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6857 VMA_ASSERT(0 &&
"Not found.");
6866 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6867 m_Metadata(hAllocator),
6868 m_MemoryTypeIndex(UINT32_MAX),
6870 m_hMemory(VK_NULL_HANDLE),
6872 m_pMappedData(VMA_NULL)
6876 void VmaDeviceMemoryBlock::Init(
6877 uint32_t newMemoryTypeIndex,
6878 VkDeviceMemory newMemory,
6879 VkDeviceSize newSize,
6882 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6884 m_MemoryTypeIndex = newMemoryTypeIndex;
6886 m_hMemory = newMemory;
6888 m_Metadata.Init(newSize);
6891 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6895 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6897 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6898 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6899 m_hMemory = VK_NULL_HANDLE;
6902 bool VmaDeviceMemoryBlock::Validate()
const 6904 if((m_hMemory == VK_NULL_HANDLE) ||
6905 (m_Metadata.GetSize() == 0))
6910 return m_Metadata.Validate();
6913 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
6915 void* pData =
nullptr;
6916 VkResult res = Map(hAllocator, 1, &pData);
6917 if(res != VK_SUCCESS)
6922 res = m_Metadata.CheckCorruption(pData);
6924 Unmap(hAllocator, 1);
6929 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6936 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6939 m_MapCount += count;
6940 VMA_ASSERT(m_pMappedData != VMA_NULL);
6941 if(ppData != VMA_NULL)
6943 *ppData = m_pMappedData;
6949 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6950 hAllocator->m_hDevice,
6956 if(result == VK_SUCCESS)
6958 if(ppData != VMA_NULL)
6960 *ppData = m_pMappedData;
6968 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6975 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6976 if(m_MapCount >= count)
6978 m_MapCount -= count;
6981 m_pMappedData = VMA_NULL;
6982 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6987 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
6991 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
6993 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
6994 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
6997 VkResult res = Map(hAllocator, 1, &pData);
6998 if(res != VK_SUCCESS)
7003 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
7004 VmaWriteMagicValue(pData, allocOffset + allocSize);
7006 Unmap(hAllocator, 1);
7011 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
7013 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
7014 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
7017 VkResult res = Map(hAllocator, 1, &pData);
7018 if(res != VK_SUCCESS)
7023 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
7025 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
7027 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
7029 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
7032 Unmap(hAllocator, 1);
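// The margin/magic-value scheme used above, sketched on a raw byte buffer: fill
// the debug margin before and after an allocation with a known 32-bit pattern
// and re-check it on free. The pattern and margin values below are placeholders,
// not necessarily the ones this library uses.
#if 0
#include <cstdint>
#include <cstring>

const uint32_t MAGIC = 0xDEADC0DEu; // placeholder pattern
const uint64_t MARGIN = 16;         // placeholder margin size, multiple of 4

void WriteMagic(void* pBlockData, uint64_t offset)
{
    char* p = static_cast<char*>(pBlockData) + offset;
    for(uint64_t i = 0; i + sizeof(MAGIC) <= MARGIN; i += sizeof(MAGIC))
    {
        memcpy(p + i, &MAGIC, sizeof(MAGIC));
    }
}

bool ValidateMagic(const void* pBlockData, uint64_t offset)
{
    const char* p = static_cast<const char*>(pBlockData) + offset;
    for(uint64_t i = 0; i + sizeof(MAGIC) <= MARGIN; i += sizeof(MAGIC))
    {
        uint32_t value = 0;
        memcpy(&value, p + i, sizeof(value));
        if(value != MAGIC)
        {
            return false; // corruption detected
        }
    }
    return true;
}

// On allocation at allocOffset with size allocSize:
//   WriteMagic(pMappedBlock, allocOffset - MARGIN);
//   WriteMagic(pMappedBlock, allocOffset + allocSize);
// On free, ValidateMagic() is called for the same two offsets.
#endif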
7037 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
7042 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
7043 hAllocation->GetBlock() ==
this);
7045 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
7046 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
7047 hAllocator->m_hDevice,
7050 hAllocation->GetOffset());
7053 VkResult VmaDeviceMemoryBlock::BindImageMemory(
7058 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
7059 hAllocation->GetBlock() ==
this);
7061 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
7062 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
7063 hAllocator->m_hDevice,
7066 hAllocation->GetOffset());
7071 memset(&outInfo, 0,
sizeof(outInfo));
7090 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
7098 VmaPool_T::VmaPool_T(
7103 createInfo.memoryTypeIndex,
7104 createInfo.blockSize,
7105 createInfo.minBlockCount,
7106 createInfo.maxBlockCount,
7108 createInfo.frameInUseCount,
7114 VmaPool_T::~VmaPool_T()
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED

VmaBlockVector::VmaBlockVector(
7124 uint32_t memoryTypeIndex,
7125 VkDeviceSize preferredBlockSize,
7126 size_t minBlockCount,
7127 size_t maxBlockCount,
7128 VkDeviceSize bufferImageGranularity,
7129 uint32_t frameInUseCount,
7130 bool isCustomPool) :
7131 m_hAllocator(hAllocator),
7132 m_MemoryTypeIndex(memoryTypeIndex),
7133 m_PreferredBlockSize(preferredBlockSize),
7134 m_MinBlockCount(minBlockCount),
7135 m_MaxBlockCount(maxBlockCount),
7136 m_BufferImageGranularity(bufferImageGranularity),
7137 m_FrameInUseCount(frameInUseCount),
7138 m_IsCustomPool(isCustomPool),
7139 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
7140 m_HasEmptyBlock(false),
7141 m_pDefragmentator(VMA_NULL),
7146 VmaBlockVector::~VmaBlockVector()
7148 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
7150 for(
size_t i = m_Blocks.size(); i--; )
7152 m_Blocks[i]->Destroy(m_hAllocator);
7153 vma_delete(m_hAllocator, m_Blocks[i]);
7157 VkResult VmaBlockVector::CreateMinBlocks()
7159 for(
size_t i = 0; i < m_MinBlockCount; ++i)
7161 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
7162 if(res != VK_SUCCESS)
7170 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
7178 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7180 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7182 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7184 VMA_HEAVY_ASSERT(pBlock->Validate());
7185 pBlock->m_Metadata.AddPoolStats(*pStats);
7189 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 7191 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
7192 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
7193 (VMA_DEBUG_MARGIN > 0) &&
7194 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
7197 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
7199 VkResult VmaBlockVector::Allocate(
7201 uint32_t currentFrameIndex,
7203 VkDeviceSize alignment,
7205 VmaSuballocationType suballocType,
7209 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
7211 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7217 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7221 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
7223 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
7224 VMA_ASSERT(pCurrBlock);
7225 VmaAllocationRequest currRequest = {};
7226 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
7229 m_BufferImageGranularity,
7237 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
7241 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
7242 if(res != VK_SUCCESS)
7249 if(pCurrBlock->m_Metadata.IsEmpty())
7251 m_HasEmptyBlock =
false;
7254 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7255 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, size, *pAllocation);
7256 (*pAllocation)->InitBlockAllocation(
7265 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
7266 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
7267 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7268 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7270 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7272 if(IsCorruptionDetectionEnabled())
7274 VkResult res = pCurrBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
7275 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
7281 const bool canCreateNewBlock =
7283 (m_Blocks.size() < m_MaxBlockCount);
7286 if(canCreateNewBlock)
7289 VkDeviceSize newBlockSize = m_PreferredBlockSize;
7290 uint32_t newBlockSizeShift = 0;
7291 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
7295 if(m_IsCustomPool ==
false)
7298 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
7299 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
7301 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7302 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
7304 newBlockSize = smallerNewBlockSize;
7305 ++newBlockSizeShift;
7314 size_t newBlockIndex = 0;
7315 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
7317 if(m_IsCustomPool ==
false)
7319 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
7321 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7322 if(smallerNewBlockSize >= size)
7324 newBlockSize = smallerNewBlockSize;
7325 ++newBlockSizeShift;
7326 res = CreateBlock(newBlockSize, &newBlockIndex);
7335 if(res == VK_SUCCESS)
7337 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
7338 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= size);
7342 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
7343 if(res != VK_SUCCESS)
7350 VmaAllocationRequest allocRequest;
7351 if(pBlock->m_Metadata.CreateAllocationRequest(
7354 m_BufferImageGranularity,
7361 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7362 pBlock->m_Metadata.Alloc(allocRequest, suballocType, size, *pAllocation);
7363 (*pAllocation)->InitBlockAllocation(
7366 allocRequest.offset,
7372 VMA_HEAVY_ASSERT(pBlock->Validate());
7373 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
7374 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7375 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7377 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7379 if(IsCorruptionDetectionEnabled())
7381 res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, allocRequest.offset, size);
7382 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
7389 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7397 if(canMakeOtherLost)
7399 uint32_t tryIndex = 0;
7400 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
7402 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
7403 VmaAllocationRequest bestRequest = {};
7404 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
7408 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
7410 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
7411 VMA_ASSERT(pCurrBlock);
7412 VmaAllocationRequest currRequest = {};
7413 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
7416 m_BufferImageGranularity,
7423 const VkDeviceSize currRequestCost = currRequest.CalcCost();
7424 if(pBestRequestBlock == VMA_NULL ||
7425 currRequestCost < bestRequestCost)
7427 pBestRequestBlock = pCurrBlock;
7428 bestRequest = currRequest;
7429 bestRequestCost = currRequestCost;
7431 if(bestRequestCost == 0)
7439 if(pBestRequestBlock != VMA_NULL)
7443 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
7444 if(res != VK_SUCCESS)
7450 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
7456 if(pBestRequestBlock->m_Metadata.IsEmpty())
7458 m_HasEmptyBlock =
false;
7461 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7462 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, size, *pAllocation);
7463 (*pAllocation)->InitBlockAllocation(
7472 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
7473 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
7474 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7475 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7477 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7479 if(IsCorruptionDetectionEnabled())
7481 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
7482 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
7497 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
7499 return VK_ERROR_TOO_MANY_OBJECTS;
7503 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
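// The new-block sizing heuristic used in Allocate() above, extracted into a
// standalone sketch: start from the preferred block size and halve it up to
// NEW_BLOCK_SIZE_SHIFT_MAX times while the smaller size still dominates all
// existing blocks and leaves room for at least two allocations of the requested
// size. Function and parameter names are illustrative only.
#if 0
#include <cstdint>

uint64_t ChooseNewBlockSize(uint64_t preferredBlockSize,
                            uint64_t maxExistingBlockSize,
                            uint64_t allocSize)
{
    const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
    uint64_t newBlockSize = preferredBlockSize;
    for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
    {
        const uint64_t smaller = newBlockSize / 2;
        if(smaller > maxExistingBlockSize && smaller >= allocSize * 2)
        {
            newBlockSize = smaller;
        }
        else
        {
            break;
        }
    }
    return newBlockSize;
}
#endif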
7506 void VmaBlockVector::Free(
7509 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
7513 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7515 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
7517 if(IsCorruptionDetectionEnabled())
7519 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
7520 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
7523 if(hAllocation->IsPersistentMap())
7525 pBlock->Unmap(m_hAllocator, 1);
7528 pBlock->m_Metadata.Free(hAllocation);
7529 VMA_HEAVY_ASSERT(pBlock->Validate());
7531 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
7534 if(pBlock->m_Metadata.IsEmpty())
7537 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
7539 pBlockToDelete = pBlock;
7545 m_HasEmptyBlock =
true;
7550 else if(m_HasEmptyBlock)
7552 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
7553 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
7555 pBlockToDelete = pLastBlock;
7556 m_Blocks.pop_back();
7557 m_HasEmptyBlock =
false;
7561 IncrementallySortBlocks();
7566 if(pBlockToDelete != VMA_NULL)
7568 VMA_DEBUG_LOG(
" Deleted empty allocation");
7569 pBlockToDelete->Destroy(m_hAllocator);
7570 vma_delete(m_hAllocator, pBlockToDelete);
7574 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 7576 VkDeviceSize result = 0;
7577 for(
size_t i = m_Blocks.size(); i--; )
7579 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
7580 if(result >= m_PreferredBlockSize)
7588 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
7590 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7592 if(m_Blocks[blockIndex] == pBlock)
7594 VmaVectorRemove(m_Blocks, blockIndex);
7601 void VmaBlockVector::IncrementallySortBlocks()
7604 for(
size_t i = 1; i < m_Blocks.size(); ++i)
7606 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
7608 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
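// The "bubble sort only until first swap" idea used by IncrementallySortBlocks:
// each call performs at most one adjacent swap, so the vector converges toward
// ascending order (here: by free size) over many calls without ever paying for a
// full sort. Standalone sketch with an illustrative element type.
#if 0
#include <cstdint>
#include <utility>
#include <vector>

void IncrementallySort(std::vector<uint64_t>& freeSizes)
{
    for(size_t i = 1; i < freeSizes.size(); ++i)
    {
        if(freeSizes[i - 1] > freeSizes[i])
        {
            std::swap(freeSizes[i - 1], freeSizes[i]);
            return; // at most one swap per call
        }
    }
}
#endif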
7614 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
7616 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7617 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
7618 allocInfo.allocationSize = blockSize;
7619 VkDeviceMemory mem = VK_NULL_HANDLE;
7620 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
7629 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
7633 allocInfo.allocationSize,
7636 m_Blocks.push_back(pBlock);
7637 if(pNewBlockIndex != VMA_NULL)
7639 *pNewBlockIndex = m_Blocks.size() - 1;
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
7649 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7655 json.WriteString(
"MemoryTypeIndex");
7656 json.WriteNumber(m_MemoryTypeIndex);
7658 json.WriteString(
"BlockSize");
7659 json.WriteNumber(m_PreferredBlockSize);
7661 json.WriteString(
"BlockCount");
7662 json.BeginObject(
true);
7663 if(m_MinBlockCount > 0)
7665 json.WriteString(
"Min");
7666 json.WriteNumber((uint64_t)m_MinBlockCount);
7668 if(m_MaxBlockCount < SIZE_MAX)
7670 json.WriteString(
"Max");
7671 json.WriteNumber((uint64_t)m_MaxBlockCount);
7673 json.WriteString(
"Cur");
7674 json.WriteNumber((uint64_t)m_Blocks.size());
7677 if(m_FrameInUseCount > 0)
7679 json.WriteString(
"FrameInUseCount");
7680 json.WriteNumber(m_FrameInUseCount);
7685 json.WriteString(
"PreferredBlockSize");
7686 json.WriteNumber(m_PreferredBlockSize);
7689 json.WriteString(
"Blocks");
7691 for(
size_t i = 0; i < m_Blocks.size(); ++i)
7694 json.ContinueString(m_Blocks[i]->GetId());
7697 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
#endif // #if VMA_STATS_STRING_ENABLED

VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
7708 uint32_t currentFrameIndex)
7710 if(m_pDefragmentator == VMA_NULL)
7712 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
7718 return m_pDefragmentator;
7721 VkResult VmaBlockVector::Defragment(
7723 VkDeviceSize& maxBytesToMove,
7724 uint32_t& maxAllocationsToMove)
7726 if(m_pDefragmentator == VMA_NULL)
7731 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7734 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
7737 if(pDefragmentationStats != VMA_NULL)
        const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
        const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
7743 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7744 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
7750 m_HasEmptyBlock =
false;
7751 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7753 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7754 if(pBlock->m_Metadata.IsEmpty())
7756 if(m_Blocks.size() > m_MinBlockCount)
7758 if(pDefragmentationStats != VMA_NULL)
7761 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7764 VmaVectorRemove(m_Blocks, blockIndex);
7765 pBlock->Destroy(m_hAllocator);
7766 vma_delete(m_hAllocator, pBlock);
7770 m_HasEmptyBlock =
true;
7778 void VmaBlockVector::DestroyDefragmentator()
7780 if(m_pDefragmentator != VMA_NULL)
7782 vma_delete(m_hAllocator, m_pDefragmentator);
7783 m_pDefragmentator = VMA_NULL;
7787 void VmaBlockVector::MakePoolAllocationsLost(
7788 uint32_t currentFrameIndex,
7789 size_t* pLostAllocationCount)
7791 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7792 size_t lostAllocationCount = 0;
7793 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7795 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7797 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7799 if(pLostAllocationCount != VMA_NULL)
7801 *pLostAllocationCount = lostAllocationCount;
7805 VkResult VmaBlockVector::CheckCorruption()
7807 if(!IsCorruptionDetectionEnabled())
7809 return VK_ERROR_FEATURE_NOT_PRESENT;
7812 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7813 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7815 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7817 VkResult res = pBlock->CheckCorruption(m_hAllocator);
7818 if(res != VK_SUCCESS)
7826 void VmaBlockVector::AddStats(
VmaStats* pStats)
7828 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7829 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7831 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7833 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7835 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7837 VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
7849 VmaDefragmentator::VmaDefragmentator(
7851 VmaBlockVector* pBlockVector,
7852 uint32_t currentFrameIndex) :
7853 m_hAllocator(hAllocator),
7854 m_pBlockVector(pBlockVector),
7855 m_CurrentFrameIndex(currentFrameIndex),
7857 m_AllocationsMoved(0),
7858 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7859 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
7863 VmaDefragmentator::~VmaDefragmentator()
7865 for(
size_t i = m_Blocks.size(); i--; )
7867 vma_delete(m_hAllocator, m_Blocks[i]);
7871 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7873 AllocationInfo allocInfo;
7874 allocInfo.m_hAllocation = hAlloc;
7875 allocInfo.m_pChanged = pChanged;
7876 m_Allocations.push_back(allocInfo);
7879 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7882 if(m_pMappedDataForDefragmentation)
7884 *ppMappedData = m_pMappedDataForDefragmentation;
7889 if(m_pBlock->GetMappedData())
7891 *ppMappedData = m_pBlock->GetMappedData();
7896 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7897 *ppMappedData = m_pMappedDataForDefragmentation;
7901 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7903 if(m_pMappedDataForDefragmentation != VMA_NULL)
7905 m_pBlock->Unmap(hAllocator, 1);
7909 VkResult VmaDefragmentator::DefragmentRound(
7910 VkDeviceSize maxBytesToMove,
7911 uint32_t maxAllocationsToMove)
7913 if(m_Blocks.empty())
7918 size_t srcBlockIndex = m_Blocks.size() - 1;
7919 size_t srcAllocIndex = SIZE_MAX;
7925 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7927 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7930 if(srcBlockIndex == 0)
7937 srcAllocIndex = SIZE_MAX;
7942 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7946 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7947 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
7949 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7950 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7951 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7952 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
7955 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7957 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7958 VmaAllocationRequest dstAllocRequest;
7959 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7960 m_CurrentFrameIndex,
7961 m_pBlockVector->GetFrameInUseCount(),
7962 m_pBlockVector->GetBufferImageGranularity(),
7967 &dstAllocRequest) &&
7969 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
7971 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
7974 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7975 (m_BytesMoved + size > maxBytesToMove))
7977 return VK_INCOMPLETE;
7980 void* pDstMappedData = VMA_NULL;
7981 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7982 if(res != VK_SUCCESS)
7987 void* pSrcMappedData = VMA_NULL;
7988 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7989 if(res != VK_SUCCESS)
7996 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7997 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7998 static_cast<size_t>(size));
8000 if(VMA_DEBUG_MARGIN > 0)
8002 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
8003 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
8006 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
8007 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
8009 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
8011 if(allocInfo.m_pChanged != VMA_NULL)
8013 *allocInfo.m_pChanged = VK_TRUE;
8016 ++m_AllocationsMoved;
8017 m_BytesMoved += size;
8019 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
8027 if(srcAllocIndex > 0)
8033 if(srcBlockIndex > 0)
8036 srcAllocIndex = SIZE_MAX;
8046 VkResult VmaDefragmentator::Defragment(
8047 VkDeviceSize maxBytesToMove,
8048 uint32_t maxAllocationsToMove)
8050 if(m_Allocations.empty())
8056 const size_t blockCount = m_pBlockVector->m_Blocks.size();
8057 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
8059 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
8060 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
8061 m_Blocks.push_back(pBlockInfo);
8065 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
8068 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
8070 AllocationInfo& allocInfo = m_Allocations[blockIndex];
8072 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
8074 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
8075 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
8076 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
8078 (*it)->m_Allocations.push_back(allocInfo);
8086 m_Allocations.clear();
8088 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
8090 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
8091 pBlockInfo->CalcHasNonMovableAllocations();
8092 pBlockInfo->SortAllocationsBySizeDescecnding();
8096 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
8099 VkResult result = VK_SUCCESS;
8100 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
8102 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
8106 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
8108 m_Blocks[blockIndex]->Unmap(m_hAllocator);
8114 bool VmaDefragmentator::MoveMakesSense(
8115 size_t dstBlockIndex, VkDeviceSize dstOffset,
8116 size_t srcBlockIndex, VkDeviceSize srcOffset)
8118 if(dstBlockIndex < srcBlockIndex)
8122 if(dstBlockIndex > srcBlockIndex)
8126 if(dstOffset < srcOffset)
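// MoveMakesSense is a lexicographic "does the destination come earlier?" test on
// (blockIndex, offset): defragmentation only moves data toward lower block
// indices and, within the same block, toward lower offsets. Equivalent
// standalone form (illustrative):
#if 0
#include <cstdint>
#include <tuple>

bool MoveMakesSense(size_t dstBlockIndex, uint64_t dstOffset,
                    size_t srcBlockIndex, uint64_t srcOffset)
{
    return std::make_tuple(dstBlockIndex, dstOffset) <
           std::make_tuple(srcBlockIndex, srcOffset);
}
#endif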
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
8143 m_StartCounter(INT64_MAX)
8149 m_UseMutex = useMutex;
    m_Flags = settings.flags;
8152 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
8153 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
8159 return VK_ERROR_INITIALIZATION_FAILED;
8163 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
8164 fprintf(m_File,
"%s\n",
"1,2");
8169 VmaRecorder::~VmaRecorder()
8171 if(m_File != VMA_NULL)
8177 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
8179 CallParams callParams;
8180 GetBasicParams(callParams);
8182 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8183 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
8187 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
8189 CallParams callParams;
8190 GetBasicParams(callParams);
8192 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8193 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
8199 CallParams callParams;
8200 GetBasicParams(callParams);
8202 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8203 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
8214 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
8216 CallParams callParams;
8217 GetBasicParams(callParams);
8219 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8220 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
8225 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
8226 const VkMemoryRequirements& vkMemReq,
8230 CallParams callParams;
8231 GetBasicParams(callParams);
8233 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8234 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
8235 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8238 vkMemReq.memoryTypeBits,
8246 userDataStr.GetString());
8250 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
8251 const VkMemoryRequirements& vkMemReq,
8252 bool requiresDedicatedAllocation,
8253 bool prefersDedicatedAllocation,
8257 CallParams callParams;
8258 GetBasicParams(callParams);
8260 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8261 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
8262 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8265 vkMemReq.memoryTypeBits,
8266 requiresDedicatedAllocation ? 1 : 0,
8267 prefersDedicatedAllocation ? 1 : 0,
8275 userDataStr.GetString());
8279 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
8280 const VkMemoryRequirements& vkMemReq,
8281 bool requiresDedicatedAllocation,
8282 bool prefersDedicatedAllocation,
8286 CallParams callParams;
8287 GetBasicParams(callParams);
8289 VmaMutexLock lock(m_FileMutex, m_UseMutex);
8290 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
8291 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
8294 vkMemReq.memoryTypeBits,
8295 requiresDedicatedAllocation ? 1 : 0,
8296 prefersDedicatedAllocation ? 1 : 0,
8304 userDataStr.GetString());
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}
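// A minimal usage sketch (illustration only, not part of the library): recording is
// switched on by pointing VmaAllocatorCreateInfo::pRecordSettings at a VmaRecordSettings
// structure before vmaCreateAllocator(). The allocator then creates a VmaRecorder like
// the one above and appends one CSV line per recorded call. The file path below is a
// hypothetical example.
//
//     VmaRecordSettings recordSettings = {};
//     recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT;
//     recordSettings.pFilePath = "vma_calls.csv";
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     allocatorInfo.physicalDevice = physicalDevice; // assumed to exist in the application
//     allocatorInfo.device = device;
//     allocatorInfo.pRecordSettings = &recordSettings;
//
//     VmaAllocator allocator = VK_NULL_HANDLE;
//     vmaCreateAllocator(&allocatorInfo, &allocator);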
#endif // #if VMA_RECORDING_ENABLED

VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_hDevice(pCreateInfo->device),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Corruption detection writes uint32_t magic values, so the margin must be a multiple of 4.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

#if !(VMA_DEDICATED_ALLOCATION)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));

    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
    {
        m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
    }

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimit[heapIndex] = limit;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false); // isCustomPool
        // No need to call CreateMinBlocks here, because minBlockCount is 0.

        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        pCreateInfo->pRecordSettings->pFilePath != VMA_NULL &&
        *pCreateInfo->pRecordSettings->pFilePath != '\0')
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = &vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = &vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif // #if VMA_DEDICATED_ALLOCATION
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
#if VMA_DEDICATED_ALLOCATION
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
}
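// A minimal sketch of the caller side of ImportVulkanFunctions(), assuming the application
// loads Vulkan dynamically (VMA_STATIC_VULKAN_FUNCTIONS defined to 0) and already owns valid
// instance and device handles. Only standard loader entry points are used; the surrounding
// glue code is hypothetical.
//
//     VmaVulkanFunctions vulkanFunctions = {};
//     vulkanFunctions.vkGetPhysicalDeviceProperties =
//         (PFN_vkGetPhysicalDeviceProperties)vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceProperties");
//     vulkanFunctions.vkGetPhysicalDeviceMemoryProperties =
//         (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetInstanceProcAddr(instance, "vkGetPhysicalDeviceMemoryProperties");
//     vulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkGetDeviceProcAddr(device, "vkAllocateMemory");
//     vulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkGetDeviceProcAddr(device, "vkFreeMemory");
//     // ...and so on for every member asserted non-null above.
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     allocatorInfo.physicalDevice = physicalDevice;
//     allocatorInfo.device = device;
//     allocatorInfo.pVulkanFunctions = &vulkanFunctions;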
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
}
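// Worked example of the heuristic above, assuming the usual defaults of
// VMA_SMALL_HEAP_MAX_SIZE = 1 GiB and VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB:
// a 256 MiB heap counts as "small" and gets 256 MiB / 8 = 32 MiB blocks, while an
// 8 GiB heap uses the preferred large-heap block size of 256 MiB.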
VkResult VmaAllocator_T::AllocateMemoryOfType(
    const VkMemoryRequirements& vkMemReq,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(pAllocation != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If this memory type is not HOST_VISIBLE, persistent mapping makes no sense: drop MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if requested size is more than half of preferred block size.
        vkMemReq.size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            return AllocateDedicatedMemory(
                vkMemReq.size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                pAllocation);
        }
    }
    else
    {
        VkResult res = blockVector->Allocate(
            VK_NULL_HANDLE, // hCurrentPool
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignment,
            finalCreateInfo,
            suballocType,
            pAllocation);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Allocation from default pool failed: try dedicated memory, unless forbidden.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        else
        {
            res = AllocateDedicatedMemory(
                vkMemReq.size,
                suballocType,
                memTypeIndex,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
                (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
                finalCreateInfo.pUserData,
                dedicatedBuffer,
                dedicatedImage,
                pAllocation);
            if(res == VK_SUCCESS)
            {
                VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
                return VK_SUCCESS;
            }
            else
            {
                VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
                return res;
            }
        }
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(pAllocation);

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation)
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION

    // Allocate VkDeviceMemory.
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = vma_new(this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    // Register it in m_pDedicatedAllocations.
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
    }

    VMA_DEBUG_LOG("    Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
        return createInfo.pool->m_BlockVector.Allocate(
            createInfo.pool,
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfo,
            suballocType,
            pAllocation);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer, dedicatedImage,
                createInfo, memTypeIndex, suballocType, pAllocation);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed: try other compatible memory types.
            for(;;)
            {
                // Remove the old memTypeIndex from the set of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer, dedicatedImage,
                        createInfo, memTypeIndex, suballocType, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: this memory type also failed - continue with the next candidate.
                }
                else
                {
                    // No other matching memory type could be found.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
void VmaAllocator_T::FreeMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation);

    if(allocation->CanBecomeLost() == false ||
        allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
        }

        switch(allocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                VmaBlockVector* pBlockVector = VMA_NULL;
                VmaPool hPool = allocation->GetPool();
                if(hPool != VK_NULL_HANDLE)
                {
                    pBlockVector = &hPool->m_BlockVector;
                }
                else
                {
                    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                    pBlockVector = m_pBlockVectors[memTypeIndex];
                }
                pBlockVector->Free(allocation);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            FreeDedicatedMemory(allocation);
            break;
        default:
            VMA_ASSERT(0);
        }
    }

    allocation->SetUserData(this, VMA_NULL);
    vma_delete(this, allocation);
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::Defragment(
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    if(pAllocationsChanged != VMA_NULL)
    {
        // Clear one flag per allocation, not just the first element.
        memset(pAllocationsChanged, 0, allocationCount * sizeof(*pAllocationsChanged));
    }
    if(pDefragmentationStats != VMA_NULL)
    {
        memset(pDefragmentationStats, 0, sizeof(*pDefragmentationStats));
    }

    const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();

    VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);

    const size_t poolCount = m_Pools.size();

    // Dispatch pAllocations among defragmentators, creating them in block vectors when necessary.
    for(size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
        // Dedicated allocations cannot be defragmented; only HOST_VISIBLE, non-lost block allocations qualify.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVector* pAllocBlockVector = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetPool();
            if(hAllocPool != VK_NULL_HANDLE)
            {
                pAllocBlockVector = &hAllocPool->GetBlockVector();
            }
            else
            {
                pAllocBlockVector = m_pBlockVectors[memTypeIndex];
            }

            VmaDefragmentator* const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(this, currentFrameIndex);

            VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                &pAllocationsChanged[allocIndex] : VMA_NULL;
            pDefragmentator->AddAllocation(hAlloc, pChanged);
        }
    }

    VkResult result = VK_SUCCESS;

    // Main processing.
    VkDeviceSize maxBytesToMove = SIZE_MAX;
    uint32_t maxAllocationsToMove = UINT32_MAX;
    if(pDefragmentationInfo != VMA_NULL)
    {
        maxBytesToMove = pDefragmentationInfo->maxBytesToMove;
        maxAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
    }

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
        ++memTypeIndex)
    {
        if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            result = m_pBlockVectors[memTypeIndex]->Defragment(
                pDefragmentationStats,
                maxBytesToMove,
                maxAllocationsToMove);
        }
    }

    // Process custom pools.
    for(size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
    {
        result = m_Pools[poolIndex]->GetBlockVector().Defragment(
            pDefragmentationStats,
            maxBytesToMove,
            maxAllocationsToMove);
    }

    // Destroy defragmentators: custom pools first, then default pools.
    for(size_t poolIndex = poolCount; poolIndex--; )
    {
        m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
    }

    for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
    {
        if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        {
            m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
        }
    }

    return result;
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // A stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u", pCreateInfo->memoryTypeIndex);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}

void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->GetBlockVector().GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->GetBlockVector().CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = vma_new(this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    VkResult res;
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
        {
            res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
            if(res == VK_SUCCESS)
            {
                m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
            }
        }
        else
        {
            res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    else
    {
        res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
    }

    if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
    if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
    {
        VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
        m_HeapSizeLimit[heapIndex] += size;
    }
}
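// A minimal sketch of how the m_HeapSizeLimit accounting above is driven from the public API.
// The 512 MiB figure and the handles are illustrative assumptions only.
//
//     VkDeviceSize heapSizeLimits[VK_MAX_MEMORY_HEAPS];
//     for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
//         heapSizeLimits[i] = VK_WHOLE_SIZE;      // no limit
//     heapSizeLimits[0] = 512ull * 1024 * 1024;   // cap heap 0 at 512 MiB
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     allocatorInfo.physicalDevice = physicalDevice;
//     allocatorInfo.device = device;
//     allocatorInfo.pHeapSizeLimit = heapSizeLimits;
//     // AllocateVulkanMemory() then reports VK_ERROR_OUT_OF_DEVICE_MEMORY as soon as the
//     // running total for heap 0 would exceed the limit, and FreeVulkanMemory() returns
//     // the freed size to the budget.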
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
VkResult VmaAllocator_T::BindBufferMemory(VmaAllocation hAllocation, VkBuffer hBuffer)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = GetVulkanFunctions().vkBindBufferMemory(
            m_hDevice,
            hBuffer,
            hAllocation->GetMemory(),
            0); // memoryOffset
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, hBuffer);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(VmaAllocation hAllocation, VkImage hImage)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = GetVulkanFunctions().vkBindImageMemory(
            m_hDevice,
            hImage,
            hAllocation->GetMemory(),
            0); // memoryOffset
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, hImage);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                // First clamp to this allocation, then translate to the owning block.
                memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
                if(size == VK_WHOLE_SIZE)
                {
                    size = allocationSize - offset;
                }
                else
                {
                    VMA_ASSERT(offset + size <= allocationSize);
                }
                memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

                const VkDeviceSize allocationOffset = hAllocation->GetOffset();
                VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
                const VkDeviceSize blockSize = hAllocation->GetBlock()->m_Metadata.GetSize();
                memRange.offset += allocationOffset;
                memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
                break;
            }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: just ignore this call for coherent memory types.
}
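// Worked example of the rounding above (numbers assumed): with nonCoherentAtomSize = 64,
// flushing offset = 100, size = 200 of a dedicated allocation yields
// memRange.offset = VmaAlignDown(100, 64) = 64 and
// memRange.size = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256,
// clamped so the range never extends past the end of the allocation (or, for block
// allocations, past the end of the owning VkDeviceMemory block).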
void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    if(allocation->GetMappedData() != VMA_NULL)
    {
        (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
    }

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

void vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
void vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

void vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}
void vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

void vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

void vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
#if VMA_STATS_STRING_ENABLED

void vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

void vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

VkResult vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;

    if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0)
    {
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost is the number of preferredFlags bits missing from this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
                // Remember the memory type with the lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
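// A minimal usage sketch of the function above (values are illustrative assumptions):
// choosing a memory type for a staging allocation without creating a resource first.
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
//
//     uint32_t memTypeIndex = UINT32_MAX;
//     VkResult res = vmaFindMemoryTypeIndex(
//         allocator,
//         UINT32_MAX,        // memoryTypeBits: accept any memory type
//         &allocCreateInfo,
//         &memTypeIndex);
//     // VK_ERROR_FEATURE_NOT_PRESENT means no memory type satisfies requiredFlags.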
VkResult vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VkResult vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
10364 if(pool == VK_NULL_HANDLE)
10369 VMA_DEBUG_LOG(
"vmaDestroyPool");
10371 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10373 #if VMA_RECORDING_ENABLED 10374 if(allocator->GetRecorder() != VMA_NULL)
10376 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
10380 allocator->DestroyPool(pool);
10388 VMA_ASSERT(allocator && pool && pPoolStats);
10390 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10392 allocator->GetPoolStats(pool, pPoolStats);
10398 size_t* pLostAllocationCount)
10400 VMA_ASSERT(allocator && pool);
10402 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10404 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
10409 VMA_ASSERT(allocator && pool);
10411 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10413 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
10415 return allocator->CheckPoolCorruption(pool);
VkResult vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VkResult vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VkResult vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
void vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(allocation);
}
void vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VkBool32 vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
void vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

void vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VkResult vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

void vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}

VkResult vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    VMA_ASSERT(allocator && pAllocations);

    VMA_DEBUG_LOG("vmaDefragment");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
}

VkResult vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, buffer);
}

VkResult vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, image);
}
VkResult vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
    VMA_DEBUG_LOG("vmaCreateBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. Query memory requirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages are
        // included in the alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
            if(res >= 0)
            {
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

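/*
Illustrative usage sketch (not part of the library): creating a device-local
vertex buffer together with its memory in one call. `allocator` is an
already-created VmaAllocator; the size and usage flags are arbitrary example
values.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buf, &alloc, &allocInfo);

    // ... use the buffer ...

    // Destroys the buffer and frees its memory allocation in one call.
    vmaDestroyBuffer(allocator, buf, alloc);
*/
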
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
{
    VMA_ASSERT(allocator);
    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(allocation);
    }
}

VkResult vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
    VMA_DEBUG_LOG("vmaCreateImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            res = allocator->BindImageMemory(*pAllocation, *pImage);
            if(res >= 0)
            {
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(*pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

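/*
Illustrative usage sketch (not part of the library): creating a 2D sampled
image with optimal tiling in device-local memory. `allocator` is assumed;
the extent, format, and usage flags are arbitrary example values.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    imgCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateImage(allocator, &imgCreateInfo, &allocCreateInfo,
        &image, &alloc, nullptr);

    // ... use the image ...

    vmaDestroyImage(allocator, image, alloc);
*/
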
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
{
    VMA_ASSERT(allocator);
    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION