23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1472 #ifndef VMA_RECORDING_ENABLED 1474 #define VMA_RECORDING_ENABLED 1 1476 #define VMA_RECORDING_ENABLED 0 1481 #define NOMINMAX // For windows.h 1484 #include <vulkan/vulkan.h> 1486 #if VMA_RECORDING_ENABLED 1487 #include <windows.h> 1490 #if !defined(VMA_DEDICATED_ALLOCATION) 1491 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1492 #define VMA_DEDICATED_ALLOCATION 1 1494 #define VMA_DEDICATED_ALLOCATION 0 1512 uint32_t memoryType,
1513 VkDeviceMemory memory,
1518 uint32_t memoryType,
1519 VkDeviceMemory memory,
1591 #if VMA_DEDICATED_ALLOCATION 1592 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1593 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1719 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1727 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1737 uint32_t memoryTypeIndex,
1738 VkMemoryPropertyFlags* pFlags);
1750 uint32_t frameIndex);
1783 #define VMA_STATS_STRING_ENABLED 1 1785 #if VMA_STATS_STRING_ENABLED 1792 char** ppStatsString,
1793 VkBool32 detailedMap);
1797 char* pStatsString);
1799 #endif // #if VMA_STATS_STRING_ENABLED 2028 uint32_t memoryTypeBits,
2030 uint32_t* pMemoryTypeIndex);
2046 const VkBufferCreateInfo* pBufferCreateInfo,
2048 uint32_t* pMemoryTypeIndex);
2064 const VkImageCreateInfo* pImageCreateInfo,
2066 uint32_t* pMemoryTypeIndex);
2238 size_t* pLostAllocationCount);
2337 const VkMemoryRequirements* pVkMemoryRequirements,
2599 size_t allocationCount,
2600 VkBool32* pAllocationsChanged,
2666 const VkBufferCreateInfo* pBufferCreateInfo,
2691 const VkImageCreateInfo* pImageCreateInfo,
2717 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2720 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2721 #define VMA_IMPLEMENTATION 2724 #ifdef VMA_IMPLEMENTATION 2725 #undef VMA_IMPLEMENTATION 2747 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2748 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2760 #if VMA_USE_STL_CONTAINERS 2761 #define VMA_USE_STL_VECTOR 1 2762 #define VMA_USE_STL_UNORDERED_MAP 1 2763 #define VMA_USE_STL_LIST 1 2766 #if VMA_USE_STL_VECTOR 2770 #if VMA_USE_STL_UNORDERED_MAP 2771 #include <unordered_map> 2774 #if VMA_USE_STL_LIST 2783 #include <algorithm> 2789 #define VMA_NULL nullptr 2792 #if defined(__APPLE__) || defined(__ANDROID__) 2794 void *aligned_alloc(
size_t alignment,
size_t size)
2797 if(alignment <
sizeof(
void*))
2799 alignment =
sizeof(
void*);
2803 if(posix_memalign(&pointer, alignment, size) == 0)
2817 #define VMA_ASSERT(expr) assert(expr) 2819 #define VMA_ASSERT(expr) 2825 #ifndef VMA_HEAVY_ASSERT 2827 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2829 #define VMA_HEAVY_ASSERT(expr) 2833 #ifndef VMA_ALIGN_OF 2834 #define VMA_ALIGN_OF(type) (__alignof(type)) 2837 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2839 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2841 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2845 #ifndef VMA_SYSTEM_FREE 2847 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2849 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2854 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2858 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2862 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2866 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2869 #ifndef VMA_DEBUG_LOG 2870 #define VMA_DEBUG_LOG(format, ...) 2880 #if VMA_STATS_STRING_ENABLED 2881 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2883 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats `num` as a decimal string into outStr. At most strLen bytes are
// written; snprintf guarantees NUL-termination when strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long printable = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", printable);
}
// Formats a pointer value into outStr using the implementation-defined
// "%p" representation. At most strLen bytes are written.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    const void* const address = ptr;
    snprintf(outStr, strLen, "%p", address);
}
2901 void Lock() { m_Mutex.lock(); }
2902 void Unlock() { m_Mutex.unlock(); }
2906 #define VMA_MUTEX VmaMutex 2917 #ifndef VMA_ATOMIC_UINT32 2918 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2921 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2926 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2929 #ifndef VMA_DEBUG_ALIGNMENT 2934 #define VMA_DEBUG_ALIGNMENT (1) 2937 #ifndef VMA_DEBUG_MARGIN 2942 #define VMA_DEBUG_MARGIN (0) 2945 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2950 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2953 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2959 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2962 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2967 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2970 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2975 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2978 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2979 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2983 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2984 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2988 #ifndef VMA_CLASS_NO_COPY 2989 #define VMA_CLASS_NO_COPY(className) \ 2991 className(const className&) = delete; \ 2992 className& operator=(const className&) = delete; 2995 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2998 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3000 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3001 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3007 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3008 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count).
// SWAR technique: sums bit counts in parallel over 1-, 2-, 4-, 8- and
// 16-bit groups, so no loop or lookup table is needed.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = v - ((v >> 1) & 0x55555555u);
    count = (count & 0x33333333u) + ((count >> 2) & 0x33333333u);
    count = (count + (count >> 4)) & 0x0F0F0F0Fu;
    count = (count + (count >> 8)) & 0x00FF00FFu;
    count = (count + (count >> 16)) & 0x0000FFFFu;
    return count;
}
// Rounds `val` up to the nearest multiple of `align`.
// Pure integer arithmetic: works for any positive `align`, not only
// powers of two. Intended for non-negative values.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T fullUnits = (val + align - 1) / align;
    return fullUnits * align;
}
// Rounds `val` down to the nearest multiple of `align`.
// Works for any positive `align`, not only powers of two.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T fullUnits = val / align;
    return fullUnits * align;
}
// Integer division of x by y with rounding to nearest; halves round up
// (e.g. 10/4 -> 3). Intended for non-negative values.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / static_cast<T>(2);
    return (x + half) / y;
}
// Returns true when `x` is a power of two. Clearing the lowest set bit
// leaves zero exactly when at most one bit is set.
// NOTE: this also returns true for x == 0 — callers rely on that.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T withoutLowestBit = x & (x - 1);
    return withoutLowestBit == 0;
}
3055 static inline uint32_t VmaNextPow2(uint32_t v)
3066 static inline uint64_t VmaNextPow2(uint64_t v)
3080 static inline uint32_t VmaPrevPow2(uint32_t v)
3090 static inline uint64_t VmaPrevPow2(uint64_t v)
3102 static inline bool VmaStrIsEmpty(
const char* pStr)
3104 return pStr == VMA_NULL || *pStr ==
'\0';
3107 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort. Uses the last element of
// [beg, end) as the pivot, moves every element for which cmp(elem, pivot)
// holds in front of it, and returns an iterator to the pivot's final
// position. Requires a non-empty range.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator storePos = beg;
    for(Iterator cur = beg; cur < pivot; ++cur)
    {
        if(cmp(*cur, *pivot))
        {
            // Skip the self-swap when the element is already in place.
            if(storePos != cur)
            {
                std::swap(*cur, *storePos);
            }
            ++storePos;
        }
    }
    // Drop the pivot into its sorted slot.
    if(storePos != pivot)
    {
        std::swap(*storePos, *pivot);
    }
    return storePos;
}
3148 template<
typename Iterator,
typename Compare>
3149 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3153 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3154 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3155 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3159 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3161 #endif // #ifndef VMA_SORT 3170 static inline bool VmaBlocksOnSamePage(
3171 VkDeviceSize resourceAOffset,
3172 VkDeviceSize resourceASize,
3173 VkDeviceSize resourceBOffset,
3174 VkDeviceSize pageSize)
3176 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3177 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3178 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3179 VkDeviceSize resourceBStart = resourceBOffset;
3180 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3181 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. Consumed by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations may share a bufferImageGranularity page.
enum VmaSuballocationType
{
    // Unused region.
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    // Allocation whose resource kind is not known.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    // Memory bound to a buffer.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    // Image with tiling not known at allocation time.
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    // Image with linear tiling (presumably VK_IMAGE_TILING_LINEAR — confirm at call sites).
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    // Image with optimal tiling (presumably VK_IMAGE_TILING_OPTIMAL — confirm at call sites).
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces the enum's underlying type to at least 32 bits.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3201 static inline bool VmaIsBufferImageGranularityConflict(
3202 VmaSuballocationType suballocType1,
3203 VmaSuballocationType suballocType2)
3205 if(suballocType1 > suballocType2)
3207 VMA_SWAP(suballocType1, suballocType2);
3210 switch(suballocType1)
3212 case VMA_SUBALLOCATION_TYPE_FREE:
3214 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3216 case VMA_SUBALLOCATION_TYPE_BUFFER:
3218 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3219 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3220 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3222 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3223 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3224 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3225 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3227 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3228 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3236 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3238 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3239 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3240 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3242 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3246 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3248 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3249 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3250 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3252 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3263 VMA_CLASS_NO_COPY(VmaMutexLock)
3265 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3266 m_pMutex(useMutex ? &mutex : VMA_NULL)
3283 VMA_MUTEX* m_pMutex;
3286 #if VMA_DEBUG_GLOBAL_MUTEX 3287 static VMA_MUTEX gDebugGlobalMutex;
3288 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3290 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3294 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to
// the first element that is NOT less than `key` according to `cmp`
// (i.e. the lower bound), or `end` when every element compares less.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t lo = 0;
    size_t hi = static_cast<size_t>(end - beg);
    while(lo < hi)
    {
        const size_t mid = lo + (hi - lo) / 2;
        if(cmp(*(beg + mid), key))
        {
            // Element is less than key: the answer lies to the right.
            lo = mid + 1;
        }
        else
        {
            hi = mid;
        }
    }
    return beg + lo;
}
3327 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3329 if((pAllocationCallbacks != VMA_NULL) &&
3330 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3332 return (*pAllocationCallbacks->pfnAllocation)(
3333 pAllocationCallbacks->pUserData,
3336 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3340 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3344 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3346 if((pAllocationCallbacks != VMA_NULL) &&
3347 (pAllocationCallbacks->pfnFree != VMA_NULL))
3349 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3353 VMA_SYSTEM_FREE(ptr);
3357 template<
typename T>
3358 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3360 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3363 template<
typename T>
3364 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3366 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3369 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3371 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3373 template<
typename T>
3374 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3377 VmaFree(pAllocationCallbacks, ptr);
3380 template<
typename T>
3381 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3385 for(
size_t i = count; i--; )
3389 VmaFree(pAllocationCallbacks, ptr);
3394 template<
typename T>
3395 class VmaStlAllocator
3398 const VkAllocationCallbacks*
const m_pCallbacks;
3399 typedef T value_type;
3401 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3402 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3404 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3405 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3407 template<
typename U>
3408 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3410 return m_pCallbacks == rhs.m_pCallbacks;
3412 template<
typename U>
3413 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3415 return m_pCallbacks != rhs.m_pCallbacks;
3418 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3421 #if VMA_USE_STL_VECTOR 3423 #define VmaVector std::vector 3425 template<
typename T,
typename allocatorT>
3426 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3428 vec.insert(vec.begin() + index, item);
3431 template<
typename T,
typename allocatorT>
3432 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3434 vec.erase(vec.begin() + index);
3437 #else // #if VMA_USE_STL_VECTOR 3442 template<
typename T,
typename AllocatorT>
3446 typedef T value_type;
3448 VmaVector(
const AllocatorT& allocator) :
3449 m_Allocator(allocator),
3456 VmaVector(
size_t count,
const AllocatorT& allocator) :
3457 m_Allocator(allocator),
3458 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3464 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3465 m_Allocator(src.m_Allocator),
3466 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3467 m_Count(src.m_Count),
3468 m_Capacity(src.m_Count)
3472 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3478 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3481 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3485 resize(rhs.m_Count);
3488 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3494 bool empty()
const {
return m_Count == 0; }
3495 size_t size()
const {
return m_Count; }
3496 T* data() {
return m_pArray; }
3497 const T* data()
const {
return m_pArray; }
3499 T& operator[](
size_t index)
3501 VMA_HEAVY_ASSERT(index < m_Count);
3502 return m_pArray[index];
3504 const T& operator[](
size_t index)
const 3506 VMA_HEAVY_ASSERT(index < m_Count);
3507 return m_pArray[index];
3512 VMA_HEAVY_ASSERT(m_Count > 0);
3515 const T& front()
const 3517 VMA_HEAVY_ASSERT(m_Count > 0);
3522 VMA_HEAVY_ASSERT(m_Count > 0);
3523 return m_pArray[m_Count - 1];
3525 const T& back()
const 3527 VMA_HEAVY_ASSERT(m_Count > 0);
3528 return m_pArray[m_Count - 1];
3531 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3533 newCapacity = VMA_MAX(newCapacity, m_Count);
3535 if((newCapacity < m_Capacity) && !freeMemory)
3537 newCapacity = m_Capacity;
3540 if(newCapacity != m_Capacity)
3542 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3545 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3547 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3548 m_Capacity = newCapacity;
3549 m_pArray = newArray;
3553 void resize(
size_t newCount,
bool freeMemory =
false)
3555 size_t newCapacity = m_Capacity;
3556 if(newCount > m_Capacity)
3558 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3562 newCapacity = newCount;
3565 if(newCapacity != m_Capacity)
3567 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3568 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3569 if(elementsToCopy != 0)
3571 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3573 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3574 m_Capacity = newCapacity;
3575 m_pArray = newArray;
3581 void clear(
bool freeMemory =
false)
3583 resize(0, freeMemory);
3586 void insert(
size_t index,
const T& src)
3588 VMA_HEAVY_ASSERT(index <= m_Count);
3589 const size_t oldCount = size();
3590 resize(oldCount + 1);
3591 if(index < oldCount)
3593 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3595 m_pArray[index] = src;
3598 void remove(
size_t index)
3600 VMA_HEAVY_ASSERT(index < m_Count);
3601 const size_t oldCount = size();
3602 if(index < oldCount - 1)
3604 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3606 resize(oldCount - 1);
3609 void push_back(
const T& src)
3611 const size_t newIndex = size();
3612 resize(newIndex + 1);
3613 m_pArray[newIndex] = src;
3618 VMA_HEAVY_ASSERT(m_Count > 0);
3622 void push_front(
const T& src)
3629 VMA_HEAVY_ASSERT(m_Count > 0);
3633 typedef T* iterator;
3635 iterator begin() {
return m_pArray; }
3636 iterator end() {
return m_pArray + m_Count; }
3639 AllocatorT m_Allocator;
3645 template<
typename T,
typename allocatorT>
3646 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3648 vec.insert(index, item);
3651 template<
typename T,
typename allocatorT>
3652 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3657 #endif // #if VMA_USE_STL_VECTOR 3659 template<
typename CmpLess,
typename VectorT>
3660 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3662 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3664 vector.data() + vector.size(),
3666 CmpLess()) - vector.data();
3667 VmaVectorInsert(vector, indexToInsert, value);
3668 return indexToInsert;
3671 template<
typename CmpLess,
typename VectorT>
3672 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3675 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3680 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3682 size_t indexToRemove = it - vector.begin();
3683 VmaVectorRemove(vector, indexToRemove);
3689 template<
typename CmpLess,
typename IterT,
typename KeyT>
3690 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3693 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3694 beg, end, value, comparator);
3696 (!comparator(*it, value) && !comparator(value, *it)))
3711 template<
typename T>
3712 class VmaPoolAllocator
3714 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3716 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3717 ~VmaPoolAllocator();
3725 uint32_t NextFreeIndex;
3732 uint32_t FirstFreeIndex;
3735 const VkAllocationCallbacks* m_pAllocationCallbacks;
3736 size_t m_ItemsPerBlock;
3737 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3739 ItemBlock& CreateNewBlock();
3742 template<
typename T>
3743 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3744 m_pAllocationCallbacks(pAllocationCallbacks),
3745 m_ItemsPerBlock(itemsPerBlock),
3746 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3748 VMA_ASSERT(itemsPerBlock > 0);
3751 template<
typename T>
3752 VmaPoolAllocator<T>::~VmaPoolAllocator()
3757 template<
typename T>
3758 void VmaPoolAllocator<T>::Clear()
3760 for(
size_t i = m_ItemBlocks.size(); i--; )
3761 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3762 m_ItemBlocks.clear();
3765 template<
typename T>
3766 T* VmaPoolAllocator<T>::Alloc()
3768 for(
size_t i = m_ItemBlocks.size(); i--; )
3770 ItemBlock& block = m_ItemBlocks[i];
3772 if(block.FirstFreeIndex != UINT32_MAX)
3774 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3775 block.FirstFreeIndex = pItem->NextFreeIndex;
3776 return &pItem->Value;
3781 ItemBlock& newBlock = CreateNewBlock();
3782 Item*
const pItem = &newBlock.pItems[0];
3783 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3784 return &pItem->Value;
3787 template<
typename T>
3788 void VmaPoolAllocator<T>::Free(T* ptr)
3791 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3793 ItemBlock& block = m_ItemBlocks[i];
3797 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3800 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3802 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3803 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3804 block.FirstFreeIndex = index;
3808 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3811 template<
typename T>
3812 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3814 ItemBlock newBlock = {
3815 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3817 m_ItemBlocks.push_back(newBlock);
3820 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3821 newBlock.pItems[i].NextFreeIndex = i + 1;
3822 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3823 return m_ItemBlocks.back();
3829 #if VMA_USE_STL_LIST 3831 #define VmaList std::list 3833 #else // #if VMA_USE_STL_LIST 3835 template<
typename T>
3844 template<
typename T>
3847 VMA_CLASS_NO_COPY(VmaRawList)
3849 typedef VmaListItem<T> ItemType;
3851 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3855 size_t GetCount()
const {
return m_Count; }
3856 bool IsEmpty()
const {
return m_Count == 0; }
3858 ItemType* Front() {
return m_pFront; }
3859 const ItemType* Front()
const {
return m_pFront; }
3860 ItemType* Back() {
return m_pBack; }
3861 const ItemType* Back()
const {
return m_pBack; }
3863 ItemType* PushBack();
3864 ItemType* PushFront();
3865 ItemType* PushBack(
const T& value);
3866 ItemType* PushFront(
const T& value);
3871 ItemType* InsertBefore(ItemType* pItem);
3873 ItemType* InsertAfter(ItemType* pItem);
3875 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3876 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3878 void Remove(ItemType* pItem);
3881 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3882 VmaPoolAllocator<ItemType> m_ItemAllocator;
3888 template<
typename T>
3889 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3890 m_pAllocationCallbacks(pAllocationCallbacks),
3891 m_ItemAllocator(pAllocationCallbacks, 128),
3898 template<
typename T>
3899 VmaRawList<T>::~VmaRawList()
3905 template<
typename T>
3906 void VmaRawList<T>::Clear()
3908 if(IsEmpty() ==
false)
3910 ItemType* pItem = m_pBack;
3911 while(pItem != VMA_NULL)
3913 ItemType*
const pPrevItem = pItem->pPrev;
3914 m_ItemAllocator.Free(pItem);
3917 m_pFront = VMA_NULL;
3923 template<
typename T>
3924 VmaListItem<T>* VmaRawList<T>::PushBack()
3926 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3927 pNewItem->pNext = VMA_NULL;
3930 pNewItem->pPrev = VMA_NULL;
3931 m_pFront = pNewItem;
3937 pNewItem->pPrev = m_pBack;
3938 m_pBack->pNext = pNewItem;
3945 template<
typename T>
3946 VmaListItem<T>* VmaRawList<T>::PushFront()
3948 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3949 pNewItem->pPrev = VMA_NULL;
3952 pNewItem->pNext = VMA_NULL;
3953 m_pFront = pNewItem;
3959 pNewItem->pNext = m_pFront;
3960 m_pFront->pPrev = pNewItem;
3961 m_pFront = pNewItem;
3967 template<
typename T>
3968 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3970 ItemType*
const pNewItem = PushBack();
3971 pNewItem->Value = value;
3975 template<
typename T>
3976 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3978 ItemType*
const pNewItem = PushFront();
3979 pNewItem->Value = value;
3983 template<
typename T>
3984 void VmaRawList<T>::PopBack()
3986 VMA_HEAVY_ASSERT(m_Count > 0);
3987 ItemType*
const pBackItem = m_pBack;
3988 ItemType*
const pPrevItem = pBackItem->pPrev;
3989 if(pPrevItem != VMA_NULL)
3991 pPrevItem->pNext = VMA_NULL;
3993 m_pBack = pPrevItem;
3994 m_ItemAllocator.Free(pBackItem);
3998 template<
typename T>
3999 void VmaRawList<T>::PopFront()
4001 VMA_HEAVY_ASSERT(m_Count > 0);
4002 ItemType*
const pFrontItem = m_pFront;
4003 ItemType*
const pNextItem = pFrontItem->pNext;
4004 if(pNextItem != VMA_NULL)
4006 pNextItem->pPrev = VMA_NULL;
4008 m_pFront = pNextItem;
4009 m_ItemAllocator.Free(pFrontItem);
4013 template<
typename T>
4014 void VmaRawList<T>::Remove(ItemType* pItem)
4016 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4017 VMA_HEAVY_ASSERT(m_Count > 0);
4019 if(pItem->pPrev != VMA_NULL)
4021 pItem->pPrev->pNext = pItem->pNext;
4025 VMA_HEAVY_ASSERT(m_pFront == pItem);
4026 m_pFront = pItem->pNext;
4029 if(pItem->pNext != VMA_NULL)
4031 pItem->pNext->pPrev = pItem->pPrev;
4035 VMA_HEAVY_ASSERT(m_pBack == pItem);
4036 m_pBack = pItem->pPrev;
4039 m_ItemAllocator.Free(pItem);
4043 template<
typename T>
4044 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4046 if(pItem != VMA_NULL)
4048 ItemType*
const prevItem = pItem->pPrev;
4049 ItemType*
const newItem = m_ItemAllocator.Alloc();
4050 newItem->pPrev = prevItem;
4051 newItem->pNext = pItem;
4052 pItem->pPrev = newItem;
4053 if(prevItem != VMA_NULL)
4055 prevItem->pNext = newItem;
4059 VMA_HEAVY_ASSERT(m_pFront == pItem);
4069 template<
typename T>
4070 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4072 if(pItem != VMA_NULL)
4074 ItemType*
const nextItem = pItem->pNext;
4075 ItemType*
const newItem = m_ItemAllocator.Alloc();
4076 newItem->pNext = nextItem;
4077 newItem->pPrev = pItem;
4078 pItem->pNext = newItem;
4079 if(nextItem != VMA_NULL)
4081 nextItem->pPrev = newItem;
4085 VMA_HEAVY_ASSERT(m_pBack == pItem);
4095 template<
typename T>
4096 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4098 ItemType*
const newItem = InsertBefore(pItem);
4099 newItem->Value = value;
4103 template<
typename T>
4104 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4106 ItemType*
const newItem = InsertAfter(pItem);
4107 newItem->Value = value;
4111 template<
typename T,
typename AllocatorT>
4114 VMA_CLASS_NO_COPY(VmaList)
4125 T& operator*()
const 4127 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4128 return m_pItem->Value;
4130 T* operator->()
const 4132 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4133 return &m_pItem->Value;
4136 iterator& operator++()
4138 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4139 m_pItem = m_pItem->pNext;
4142 iterator& operator--()
4144 if(m_pItem != VMA_NULL)
4146 m_pItem = m_pItem->pPrev;
4150 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4151 m_pItem = m_pList->Back();
4156 iterator operator++(
int)
4158 iterator result = *
this;
4162 iterator operator--(
int)
4164 iterator result = *
this;
4169 bool operator==(
const iterator& rhs)
const 4171 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4172 return m_pItem == rhs.m_pItem;
4174 bool operator!=(
const iterator& rhs)
const 4176 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4177 return m_pItem != rhs.m_pItem;
4181 VmaRawList<T>* m_pList;
4182 VmaListItem<T>* m_pItem;
4184 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4190 friend class VmaList<T, AllocatorT>;
4193 class const_iterator
4202 const_iterator(
const iterator& src) :
4203 m_pList(src.m_pList),
4204 m_pItem(src.m_pItem)
4208 const T& operator*()
const 4210 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4211 return m_pItem->Value;
4213 const T* operator->()
const 4215 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4216 return &m_pItem->Value;
4219 const_iterator& operator++()
4221 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4222 m_pItem = m_pItem->pNext;
4225 const_iterator& operator--()
4227 if(m_pItem != VMA_NULL)
4229 m_pItem = m_pItem->pPrev;
4233 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4234 m_pItem = m_pList->Back();
4239 const_iterator operator++(
int)
4241 const_iterator result = *
this;
4245 const_iterator operator--(
int)
4247 const_iterator result = *
this;
4252 bool operator==(
const const_iterator& rhs)
const 4254 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4255 return m_pItem == rhs.m_pItem;
4257 bool operator!=(
const const_iterator& rhs)
const 4259 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4260 return m_pItem != rhs.m_pItem;
4264 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4270 const VmaRawList<T>* m_pList;
4271 const VmaListItem<T>* m_pItem;
4273 friend class VmaList<T, AllocatorT>;
4276 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4278 bool empty()
const {
return m_RawList.IsEmpty(); }
4279 size_t size()
const {
return m_RawList.GetCount(); }
4281 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4282 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4284 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4285 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4287 void clear() { m_RawList.Clear(); }
4288 void push_back(
const T& value) { m_RawList.PushBack(value); }
4289 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4290 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4293 VmaRawList<T> m_RawList;
4296 #endif // #if VMA_USE_STL_LIST 4304 #if VMA_USE_STL_UNORDERED_MAP 4306 #define VmaPair std::pair 4308 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4309 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4311 #else // #if VMA_USE_STL_UNORDERED_MAP 4313 template<
typename T1,
typename T2>
4319 VmaPair() : first(), second() { }
4320 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4326 template<
typename KeyT,
typename ValueT>
4330 typedef VmaPair<KeyT, ValueT> PairType;
4331 typedef PairType* iterator;
4333 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4335 iterator begin() {
return m_Vector.begin(); }
4336 iterator end() {
return m_Vector.end(); }
4338 void insert(
const PairType& pair);
4339 iterator find(
const KeyT& key);
4340 void erase(iterator it);
4343 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4346 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4348 template<
typename FirstT,
typename SecondT>
4349 struct VmaPairFirstLess
4351 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4353 return lhs.first < rhs.first;
4355 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4357 return lhs.first < rhsFirst;
4361 template<
typename KeyT,
typename ValueT>
4362 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4364 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4366 m_Vector.data() + m_Vector.size(),
4368 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4369 VmaVectorInsert(m_Vector, indexToInsert, pair);
4372 template<
typename KeyT,
typename ValueT>
4373 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4375 PairType* it = VmaBinaryFindFirstNotLess(
4377 m_Vector.data() + m_Vector.size(),
4379 VmaPairFirstLess<KeyT, ValueT>());
4380 if((it != m_Vector.end()) && (it->first == key))
4386 return m_Vector.end();
4390 template<
typename KeyT,
typename ValueT>
4391 void VmaMap<KeyT, ValueT>::erase(iterator it)
4393 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4396 #endif // #if VMA_USE_STL_UNORDERED_MAP 4402 class VmaDeviceMemoryBlock;
4404 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4406 struct VmaAllocation_T
4408 VMA_CLASS_NO_COPY(VmaAllocation_T)
4410 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4414 FLAG_USER_DATA_STRING = 0x01,
4418 enum ALLOCATION_TYPE
4420 ALLOCATION_TYPE_NONE,
4421 ALLOCATION_TYPE_BLOCK,
4422 ALLOCATION_TYPE_DEDICATED,
4425 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4428 m_pUserData(VMA_NULL),
4429 m_LastUseFrameIndex(currentFrameIndex),
4430 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4431 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4433 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4435 #if VMA_STATS_STRING_ENABLED 4436 m_CreationFrameIndex = currentFrameIndex;
4437 m_BufferImageUsage = 0;
4443 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4446 VMA_ASSERT(m_pUserData == VMA_NULL);
4449 void InitBlockAllocation(
4451 VmaDeviceMemoryBlock* block,
4452 VkDeviceSize offset,
4453 VkDeviceSize alignment,
4455 VmaSuballocationType suballocationType,
4459 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4460 VMA_ASSERT(block != VMA_NULL);
4461 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4462 m_Alignment = alignment;
4464 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4465 m_SuballocationType = (uint8_t)suballocationType;
4466 m_BlockAllocation.m_hPool = hPool;
4467 m_BlockAllocation.m_Block = block;
4468 m_BlockAllocation.m_Offset = offset;
4469 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4474 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4475 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4476 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4477 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4478 m_BlockAllocation.m_Block = VMA_NULL;
4479 m_BlockAllocation.m_Offset = 0;
4480 m_BlockAllocation.m_CanBecomeLost =
true;
4483 void ChangeBlockAllocation(
4485 VmaDeviceMemoryBlock* block,
4486 VkDeviceSize offset);
4489 void InitDedicatedAllocation(
4490 uint32_t memoryTypeIndex,
4491 VkDeviceMemory hMemory,
4492 VmaSuballocationType suballocationType,
4496 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4497 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4498 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4501 m_SuballocationType = (uint8_t)suballocationType;
4502 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4503 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4504 m_DedicatedAllocation.m_hMemory = hMemory;
4505 m_DedicatedAllocation.m_pMappedData = pMappedData;
4508 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4509 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4510 VkDeviceSize GetSize()
const {
return m_Size; }
4511 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4512 void* GetUserData()
const {
return m_pUserData; }
4513 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4514 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4516 VmaDeviceMemoryBlock* GetBlock()
const 4518 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4519 return m_BlockAllocation.m_Block;
4521 VkDeviceSize GetOffset()
const;
4522 VkDeviceMemory GetMemory()
const;
4523 uint32_t GetMemoryTypeIndex()
const;
4524 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4525 void* GetMappedData()
const;
4526 bool CanBecomeLost()
const;
4529 uint32_t GetLastUseFrameIndex()
const 4531 return m_LastUseFrameIndex.load();
4533 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4535 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4545 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4547 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4549 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4560 void BlockAllocMap();
4561 void BlockAllocUnmap();
4562 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4565 #if VMA_STATS_STRING_ENABLED 4566 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4567 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4569 void InitBufferImageUsage(uint32_t bufferImageUsage)
4571 VMA_ASSERT(m_BufferImageUsage == 0);
4572 m_BufferImageUsage = bufferImageUsage;
4575 void PrintParameters(
class VmaJsonWriter& json)
const;
4579 VkDeviceSize m_Alignment;
4580 VkDeviceSize m_Size;
4582 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4584 uint8_t m_SuballocationType;
4591 struct BlockAllocation
4594 VmaDeviceMemoryBlock* m_Block;
4595 VkDeviceSize m_Offset;
4596 bool m_CanBecomeLost;
4600 struct DedicatedAllocation
4602 uint32_t m_MemoryTypeIndex;
4603 VkDeviceMemory m_hMemory;
4604 void* m_pMappedData;
4610 BlockAllocation m_BlockAllocation;
4612 DedicatedAllocation m_DedicatedAllocation;
4615 #if VMA_STATS_STRING_ENABLED 4616 uint32_t m_CreationFrameIndex;
4617 uint32_t m_BufferImageUsage;
4627 struct VmaSuballocation
4629 VkDeviceSize offset;
4632 VmaSuballocationType type;
4636 struct VmaSuballocationOffsetLess
4638 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4640 return lhs.offset < rhs.offset;
4643 struct VmaSuballocationOffsetGreater
4645 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4647 return lhs.offset > rhs.offset;
4651 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4654 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4669 struct VmaAllocationRequest
4671 VkDeviceSize offset;
4672 VkDeviceSize sumFreeSize;
4673 VkDeviceSize sumItemSize;
4674 VmaSuballocationList::iterator item;
4675 size_t itemsToMakeLostCount;
4678 VkDeviceSize CalcCost()
const 4680 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4688 class VmaBlockMetadata
4692 virtual ~VmaBlockMetadata() { }
4693 virtual void Init(VkDeviceSize size) { m_Size = size; }
4696 virtual bool Validate()
const = 0;
4697 VkDeviceSize GetSize()
const {
return m_Size; }
4698 virtual size_t GetAllocationCount()
const = 0;
4699 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4700 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4702 virtual bool IsEmpty()
const = 0;
4704 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4706 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4708 #if VMA_STATS_STRING_ENABLED 4709 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4715 virtual bool CreateAllocationRequest(
4716 uint32_t currentFrameIndex,
4717 uint32_t frameInUseCount,
4718 VkDeviceSize bufferImageGranularity,
4719 VkDeviceSize allocSize,
4720 VkDeviceSize allocAlignment,
4722 VmaSuballocationType allocType,
4723 bool canMakeOtherLost,
4725 VmaAllocationRequest* pAllocationRequest) = 0;
4727 virtual bool MakeRequestedAllocationsLost(
4728 uint32_t currentFrameIndex,
4729 uint32_t frameInUseCount,
4730 VmaAllocationRequest* pAllocationRequest) = 0;
4732 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4734 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4738 const VmaAllocationRequest& request,
4739 VmaSuballocationType type,
4740 VkDeviceSize allocSize,
4746 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4749 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4751 #if VMA_STATS_STRING_ENABLED 4752 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4753 VkDeviceSize unusedBytes,
4754 size_t allocationCount,
4755 size_t unusedRangeCount)
const;
4756 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4757 VkDeviceSize offset,
4759 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4760 VkDeviceSize offset,
4761 VkDeviceSize size)
const;
4762 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4766 VkDeviceSize m_Size;
4767 const VkAllocationCallbacks* m_pAllocationCallbacks;
4770 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4771 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4775 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4777 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4780 virtual ~VmaBlockMetadata_Generic();
4781 virtual void Init(VkDeviceSize size);
4783 virtual bool Validate()
const;
4784 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4785 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4786 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4787 virtual bool IsEmpty()
const;
4789 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4790 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4792 #if VMA_STATS_STRING_ENABLED 4793 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4796 virtual bool CreateAllocationRequest(
4797 uint32_t currentFrameIndex,
4798 uint32_t frameInUseCount,
4799 VkDeviceSize bufferImageGranularity,
4800 VkDeviceSize allocSize,
4801 VkDeviceSize allocAlignment,
4803 VmaSuballocationType allocType,
4804 bool canMakeOtherLost,
4806 VmaAllocationRequest* pAllocationRequest);
4808 virtual bool MakeRequestedAllocationsLost(
4809 uint32_t currentFrameIndex,
4810 uint32_t frameInUseCount,
4811 VmaAllocationRequest* pAllocationRequest);
4813 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4815 virtual VkResult CheckCorruption(
const void* pBlockData);
4818 const VmaAllocationRequest& request,
4819 VmaSuballocationType type,
4820 VkDeviceSize allocSize,
4825 virtual void FreeAtOffset(VkDeviceSize offset);
4828 uint32_t m_FreeCount;
4829 VkDeviceSize m_SumFreeSize;
4830 VmaSuballocationList m_Suballocations;
4833 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4835 bool ValidateFreeSuballocationList()
const;
4839 bool CheckAllocation(
4840 uint32_t currentFrameIndex,
4841 uint32_t frameInUseCount,
4842 VkDeviceSize bufferImageGranularity,
4843 VkDeviceSize allocSize,
4844 VkDeviceSize allocAlignment,
4845 VmaSuballocationType allocType,
4846 VmaSuballocationList::const_iterator suballocItem,
4847 bool canMakeOtherLost,
4848 VkDeviceSize* pOffset,
4849 size_t* itemsToMakeLostCount,
4850 VkDeviceSize* pSumFreeSize,
4851 VkDeviceSize* pSumItemSize)
const;
4853 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4857 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4860 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4863 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4944 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4946 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4949 virtual ~VmaBlockMetadata_Linear();
4950 virtual void Init(VkDeviceSize size);
4952 virtual bool Validate()
const;
4953 virtual size_t GetAllocationCount()
const;
4954 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4955 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4956 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4958 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4959 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4961 #if VMA_STATS_STRING_ENABLED 4962 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4965 virtual bool CreateAllocationRequest(
4966 uint32_t currentFrameIndex,
4967 uint32_t frameInUseCount,
4968 VkDeviceSize bufferImageGranularity,
4969 VkDeviceSize allocSize,
4970 VkDeviceSize allocAlignment,
4972 VmaSuballocationType allocType,
4973 bool canMakeOtherLost,
4975 VmaAllocationRequest* pAllocationRequest);
4977 virtual bool MakeRequestedAllocationsLost(
4978 uint32_t currentFrameIndex,
4979 uint32_t frameInUseCount,
4980 VmaAllocationRequest* pAllocationRequest);
4982 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4984 virtual VkResult CheckCorruption(
const void* pBlockData);
4987 const VmaAllocationRequest& request,
4988 VmaSuballocationType type,
4989 VkDeviceSize allocSize,
4994 virtual void FreeAtOffset(VkDeviceSize offset);
5004 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5006 enum SECOND_VECTOR_MODE
5008 SECOND_VECTOR_EMPTY,
5013 SECOND_VECTOR_RING_BUFFER,
5019 SECOND_VECTOR_DOUBLE_STACK,
5022 VkDeviceSize m_SumFreeSize;
5023 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5024 uint32_t m_1stVectorIndex;
5025 SECOND_VECTOR_MODE m_2ndVectorMode;
5027 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5028 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5029 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5030 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5033 size_t m_1stNullItemsBeginCount;
5035 size_t m_1stNullItemsMiddleCount;
5037 size_t m_2ndNullItemsCount;
5039 bool ShouldCompact1st()
const;
5040 void CleanupAfterFree();
5054 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5056 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5059 virtual ~VmaBlockMetadata_Buddy();
5060 virtual void Init(VkDeviceSize size);
5062 virtual bool Validate()
const;
5063 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5064 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5065 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5066 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5068 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5069 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5071 #if VMA_STATS_STRING_ENABLED 5072 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5075 virtual bool CreateAllocationRequest(
5076 uint32_t currentFrameIndex,
5077 uint32_t frameInUseCount,
5078 VkDeviceSize bufferImageGranularity,
5079 VkDeviceSize allocSize,
5080 VkDeviceSize allocAlignment,
5082 VmaSuballocationType allocType,
5083 bool canMakeOtherLost,
5085 VmaAllocationRequest* pAllocationRequest);
5087 virtual bool MakeRequestedAllocationsLost(
5088 uint32_t currentFrameIndex,
5089 uint32_t frameInUseCount,
5090 VmaAllocationRequest* pAllocationRequest);
5092 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5094 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5097 const VmaAllocationRequest& request,
5098 VmaSuballocationType type,
5099 VkDeviceSize allocSize,
5103 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5104 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5107 static const VkDeviceSize MIN_NODE_SIZE = 32;
5108 static const size_t MAX_LEVELS = 30;
5110 struct ValidationContext
5112 size_t calculatedAllocationCount;
5113 size_t calculatedFreeCount;
5114 VkDeviceSize calculatedSumFreeSize;
5116 ValidationContext() :
5117 calculatedAllocationCount(0),
5118 calculatedFreeCount(0),
5119 calculatedSumFreeSize(0) { }
5124 VkDeviceSize offset;
5154 VkDeviceSize m_UsableSize;
5155 uint32_t m_LevelCount;
5161 } m_FreeList[MAX_LEVELS];
5163 size_t m_AllocationCount;
5167 VkDeviceSize m_SumFreeSize;
5169 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5170 void DeleteNode(Node* node);
5171 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5172 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5173 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5175 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5176 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5180 void AddToFreeListFront(uint32_t level, Node* node);
5184 void RemoveFromFreeList(uint32_t level, Node* node);
5186 #if VMA_STATS_STRING_ENABLED 5187 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5197 class VmaDeviceMemoryBlock
5199 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5201 VmaBlockMetadata* m_pMetadata;
5205 ~VmaDeviceMemoryBlock()
5207 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5208 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5214 uint32_t newMemoryTypeIndex,
5215 VkDeviceMemory newMemory,
5216 VkDeviceSize newSize,
5218 uint32_t algorithm);
5222 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5223 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5224 uint32_t GetId()
const {
return m_Id; }
5225 void* GetMappedData()
const {
return m_pMappedData; }
5228 bool Validate()
const;
5233 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5236 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5237 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5239 VkResult BindBufferMemory(
5243 VkResult BindImageMemory(
5249 uint32_t m_MemoryTypeIndex;
5251 VkDeviceMemory m_hMemory;
5256 uint32_t m_MapCount;
5257 void* m_pMappedData;
// Strict-weak ordering of raw pointers by address; used to keep vectors of
// block/pool pointers sorted for binary search.
// NOTE(review): the `return lhs < rhs;` body line was dropped by the
// extraction; restored per upstream.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
5276 struct VmaBlockVector
5278 VMA_CLASS_NO_COPY(VmaBlockVector)
5282 uint32_t memoryTypeIndex,
5283 VkDeviceSize preferredBlockSize,
5284 size_t minBlockCount,
5285 size_t maxBlockCount,
5286 VkDeviceSize bufferImageGranularity,
5287 uint32_t frameInUseCount,
5289 bool explicitBlockSize,
5290 uint32_t algorithm);
5293 VkResult CreateMinBlocks();
5295 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5296 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5297 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5298 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5299 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5303 bool IsEmpty()
const {
return m_Blocks.empty(); }
5304 bool IsCorruptionDetectionEnabled()
const;
5308 uint32_t currentFrameIndex,
5310 VkDeviceSize alignment,
5312 VmaSuballocationType suballocType,
5321 #if VMA_STATS_STRING_ENABLED 5322 void PrintDetailedMap(
class VmaJsonWriter& json);
5325 void MakePoolAllocationsLost(
5326 uint32_t currentFrameIndex,
5327 size_t* pLostAllocationCount);
5328 VkResult CheckCorruption();
5330 VmaDefragmentator* EnsureDefragmentator(
5332 uint32_t currentFrameIndex);
5334 VkResult Defragment(
5336 VkDeviceSize& maxBytesToMove,
5337 uint32_t& maxAllocationsToMove);
5339 void DestroyDefragmentator();
5342 friend class VmaDefragmentator;
5345 const uint32_t m_MemoryTypeIndex;
5346 const VkDeviceSize m_PreferredBlockSize;
5347 const size_t m_MinBlockCount;
5348 const size_t m_MaxBlockCount;
5349 const VkDeviceSize m_BufferImageGranularity;
5350 const uint32_t m_FrameInUseCount;
5351 const bool m_IsCustomPool;
5352 const bool m_ExplicitBlockSize;
5353 const uint32_t m_Algorithm;
5354 bool m_HasEmptyBlock;
5357 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5361 VmaDefragmentator* m_pDefragmentator;
5362 uint32_t m_NextBlockId;
5364 VkDeviceSize CalcMaxBlockSize()
const;
5367 void Remove(VmaDeviceMemoryBlock* pBlock);
5371 void IncrementallySortBlocks();
5374 VkResult AllocateFromBlock(
5375 VmaDeviceMemoryBlock* pBlock,
5377 uint32_t currentFrameIndex,
5379 VkDeviceSize alignment,
5382 VmaSuballocationType suballocType,
5386 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5391 VMA_CLASS_NO_COPY(VmaPool_T)
5393 VmaBlockVector m_BlockVector;
5398 VkDeviceSize preferredBlockSize);
5401 uint32_t GetId()
const {
return m_Id; }
5402 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5404 #if VMA_STATS_STRING_ENABLED 5412 class VmaDefragmentator
5414 VMA_CLASS_NO_COPY(VmaDefragmentator)
5417 VmaBlockVector*
const m_pBlockVector;
5418 uint32_t m_CurrentFrameIndex;
5419 VkDeviceSize m_BytesMoved;
5420 uint32_t m_AllocationsMoved;
5422 struct AllocationInfo
5425 VkBool32* m_pChanged;
5428 m_hAllocation(VK_NULL_HANDLE),
5429 m_pChanged(VMA_NULL)
5434 struct AllocationInfoSizeGreater
5436 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5438 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5443 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5447 VmaDeviceMemoryBlock* m_pBlock;
5448 bool m_HasNonMovableAllocations;
5449 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5451 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5453 m_HasNonMovableAllocations(true),
5454 m_Allocations(pAllocationCallbacks),
5455 m_pMappedDataForDefragmentation(VMA_NULL)
5459 void CalcHasNonMovableAllocations()
5461 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5462 const size_t defragmentAllocCount = m_Allocations.size();
5463 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5466 void SortAllocationsBySizeDescecnding()
5468 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5471 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5476 void* m_pMappedDataForDefragmentation;
5479 struct BlockPointerLess
5481 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5483 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5485 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5487 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5493 struct BlockInfoCompareMoveDestination
5495 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5497 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5501 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5505 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5513 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5514 BlockInfoVector m_Blocks;
5516 VkResult DefragmentRound(
5517 VkDeviceSize maxBytesToMove,
5518 uint32_t maxAllocationsToMove);
5520 static bool MoveMakesSense(
5521 size_t dstBlockIndex, VkDeviceSize dstOffset,
5522 size_t srcBlockIndex, VkDeviceSize srcOffset);
5527 VmaBlockVector* pBlockVector,
5528 uint32_t currentFrameIndex);
5530 ~VmaDefragmentator();
5532 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5533 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5535 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5537 VkResult Defragment(
5538 VkDeviceSize maxBytesToMove,
5539 uint32_t maxAllocationsToMove);
5542 #if VMA_RECORDING_ENABLED 5549 void WriteConfiguration(
5550 const VkPhysicalDeviceProperties& devProps,
5551 const VkPhysicalDeviceMemoryProperties& memProps,
5552 bool dedicatedAllocationExtensionEnabled);
5555 void RecordCreateAllocator(uint32_t frameIndex);
5556 void RecordDestroyAllocator(uint32_t frameIndex);
5557 void RecordCreatePool(uint32_t frameIndex,
5560 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5561 void RecordAllocateMemory(uint32_t frameIndex,
5562 const VkMemoryRequirements& vkMemReq,
5565 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5566 const VkMemoryRequirements& vkMemReq,
5567 bool requiresDedicatedAllocation,
5568 bool prefersDedicatedAllocation,
5571 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5572 const VkMemoryRequirements& vkMemReq,
5573 bool requiresDedicatedAllocation,
5574 bool prefersDedicatedAllocation,
5577 void RecordFreeMemory(uint32_t frameIndex,
5579 void RecordSetAllocationUserData(uint32_t frameIndex,
5581 const void* pUserData);
5582 void RecordCreateLostAllocation(uint32_t frameIndex,
5584 void RecordMapMemory(uint32_t frameIndex,
5586 void RecordUnmapMemory(uint32_t frameIndex,
5588 void RecordFlushAllocation(uint32_t frameIndex,
5589 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5590 void RecordInvalidateAllocation(uint32_t frameIndex,
5591 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5592 void RecordCreateBuffer(uint32_t frameIndex,
5593 const VkBufferCreateInfo& bufCreateInfo,
5596 void RecordCreateImage(uint32_t frameIndex,
5597 const VkImageCreateInfo& imageCreateInfo,
5600 void RecordDestroyBuffer(uint32_t frameIndex,
5602 void RecordDestroyImage(uint32_t frameIndex,
5604 void RecordTouchAllocation(uint32_t frameIndex,
5606 void RecordGetAllocationInfo(uint32_t frameIndex,
5608 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5618 class UserDataString
5622 const char* GetString()
const {
return m_Str; }
5632 VMA_MUTEX m_FileMutex;
5634 int64_t m_StartCounter;
5636 void GetBasicParams(CallParams& outParams);
5640 #endif // #if VMA_RECORDING_ENABLED 5643 struct VmaAllocator_T
5645 VMA_CLASS_NO_COPY(VmaAllocator_T)
5648 bool m_UseKhrDedicatedAllocation;
5650 bool m_AllocationCallbacksSpecified;
5651 VkAllocationCallbacks m_AllocationCallbacks;
5655 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5656 VMA_MUTEX m_HeapSizeLimitMutex;
5658 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5659 VkPhysicalDeviceMemoryProperties m_MemProps;
5662 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5665 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5666 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5667 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5673 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5675 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5679 return m_VulkanFunctions;
5682 VkDeviceSize GetBufferImageGranularity()
const 5685 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5686 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5689 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5690 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5692 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5694 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5695 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5698 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5700 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5701 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5704 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5706 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5707 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5708 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5711 bool IsIntegratedGpu()
const 5713 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5716 #if VMA_RECORDING_ENABLED 5717 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5720 void GetBufferMemoryRequirements(
5722 VkMemoryRequirements& memReq,
5723 bool& requiresDedicatedAllocation,
5724 bool& prefersDedicatedAllocation)
const;
5725 void GetImageMemoryRequirements(
5727 VkMemoryRequirements& memReq,
5728 bool& requiresDedicatedAllocation,
5729 bool& prefersDedicatedAllocation)
const;
5732 VkResult AllocateMemory(
5733 const VkMemoryRequirements& vkMemReq,
5734 bool requiresDedicatedAllocation,
5735 bool prefersDedicatedAllocation,
5736 VkBuffer dedicatedBuffer,
5737 VkImage dedicatedImage,
5739 VmaSuballocationType suballocType,
5745 void CalculateStats(
VmaStats* pStats);
5747 #if VMA_STATS_STRING_ENABLED 5748 void PrintDetailedMap(
class VmaJsonWriter& json);
5751 VkResult Defragment(
5753 size_t allocationCount,
5754 VkBool32* pAllocationsChanged,
5762 void DestroyPool(
VmaPool pool);
5765 void SetCurrentFrameIndex(uint32_t frameIndex);
5766 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5768 void MakePoolAllocationsLost(
5770 size_t* pLostAllocationCount);
5771 VkResult CheckPoolCorruption(
VmaPool hPool);
5772 VkResult CheckCorruption(uint32_t memoryTypeBits);
5776 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5777 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5782 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5783 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5785 void FlushOrInvalidateAllocation(
5787 VkDeviceSize offset, VkDeviceSize size,
5788 VMA_CACHE_OPERATION op);
5790 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5793 VkDeviceSize m_PreferredLargeHeapBlockSize;
5795 VkPhysicalDevice m_PhysicalDevice;
5796 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5798 VMA_MUTEX m_PoolsMutex;
5800 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5801 uint32_t m_NextPoolId;
5805 #if VMA_RECORDING_ENABLED 5806 VmaRecorder* m_pRecorder;
5811 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5813 VkResult AllocateMemoryOfType(
5815 VkDeviceSize alignment,
5816 bool dedicatedAllocation,
5817 VkBuffer dedicatedBuffer,
5818 VkImage dedicatedImage,
5820 uint32_t memTypeIndex,
5821 VmaSuballocationType suballocType,
5825 VkResult AllocateDedicatedMemory(
5827 VmaSuballocationType suballocType,
5828 uint32_t memTypeIndex,
5830 bool isUserDataString,
5832 VkBuffer dedicatedBuffer,
5833 VkImage dedicatedImage,
5843 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5845 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5848 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5850 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5853 template<
typename T>
5856 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5859 template<
typename T>
5860 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5862 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5865 template<
typename T>
5866 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5871 VmaFree(hAllocator, ptr);
5875 template<
typename T>
5876 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5880 for(
size_t i = count; i--; )
5882 VmaFree(hAllocator, ptr);
5889 #if VMA_STATS_STRING_ENABLED 5891 class VmaStringBuilder
5894 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5895 size_t GetLength()
const {
return m_Data.size(); }
5896 const char* GetData()
const {
return m_Data.data(); }
5898 void Add(
char ch) { m_Data.push_back(ch); }
5899 void Add(
const char* pStr);
5900 void AddNewLine() { Add(
'\n'); }
5901 void AddNumber(uint32_t num);
5902 void AddNumber(uint64_t num);
5903 void AddPointer(
const void* ptr);
5906 VmaVector< char, VmaStlAllocator<char> > m_Data;
5909 void VmaStringBuilder::Add(
const char* pStr)
5911 const size_t strLen = strlen(pStr);
5914 const size_t oldCount = m_Data.size();
5915 m_Data.resize(oldCount + strLen);
5916 memcpy(m_Data.data() + oldCount, pStr, strLen);
5920 void VmaStringBuilder::AddNumber(uint32_t num)
5923 VmaUint32ToStr(buf,
sizeof(buf), num);
5927 void VmaStringBuilder::AddNumber(uint64_t num)
5930 VmaUint64ToStr(buf,
sizeof(buf), num);
5934 void VmaStringBuilder::AddPointer(
const void* ptr)
5937 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: minimal streaming JSON emitter over a VmaStringBuilder.
// Maintains a stack of open objects/arrays (StackItem) so it can insert
// commas/colons/indentation automatically; non-copyable.
5941 #endif // #if VMA_STATS_STRING_ENABLED 5946 #if VMA_STATS_STRING_ENABLED 5950 VMA_CLASS_NO_COPY(VmaJsonWriter)
5952 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// singleLine=true suppresses newlines/indentation inside the collection.
5955 void BeginObject(
bool singleLine =
false);
5958 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString; the Begin/Continue/End trio
// allows a string value to be built from multiple pieces.
5961 void WriteString(
const char* pStr);
5962 void BeginString(
const char* pStr = VMA_NULL);
5963 void ContinueString(
const char* pStr);
5964 void ContinueString(uint32_t n);
5965 void ContinueString(uint64_t n);
5966 void ContinueString_Pointer(
const void* ptr);
5967 void EndString(
const char* pStr = VMA_NULL);
5969 void WriteNumber(uint32_t n);
5970 void WriteNumber(uint64_t n);
5971 void WriteBool(
bool b);
5975 static const char*
const INDENT;
// Per-nesting-level bookkeeping: collection kind, number of values
// written so far (objects alternate key/value), and single-line mode.
5977 enum COLLECTION_TYPE
5979 COLLECTION_TYPE_OBJECT,
5980 COLLECTION_TYPE_ARRAY,
5984 COLLECTION_TYPE type;
5985 uint32_t valueCount;
5986 bool singleLineMode;
5989 VmaStringBuilder& m_SB;
5990 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5991 bool m_InsideString;
5993 void BeginValue(
bool isString);
5994 void WriteIndent(
bool oneLess =
false);
// Indentation unit used by WriteIndent (one copy per nesting level).
5997 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor wires the writer to an external string builder; the stack
// uses the caller's allocation callbacks. (m_SB initializer line dropped.)
5999 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6001 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6002 m_InsideString(false)
// Destructor asserts the document was fully closed: no open string and
// no open object/array remains.
6006 VmaJsonWriter::~VmaJsonWriter()
6008 VMA_ASSERT(!m_InsideString);
6009 VMA_ASSERT(m_Stack.empty());
// Open a JSON object: emit '{' (on a dropped line) and push a StackItem
// recording the mode so subsequent values get commas/colons right.
6012 void VmaJsonWriter::BeginObject(
bool singleLine)
6014 VMA_ASSERT(!m_InsideString);
6020 item.type = COLLECTION_TYPE_OBJECT;
6021 item.valueCount = 0;
6022 item.singleLineMode = singleLine;
6023 m_Stack.push_back(item);
// Close the innermost object; asserts it really is an object.
6026 void VmaJsonWriter::EndObject()
6028 VMA_ASSERT(!m_InsideString);
6033 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Open / close a JSON array — mirrors BeginObject/EndObject.
6037 void VmaJsonWriter::BeginArray(
bool singleLine)
6039 VMA_ASSERT(!m_InsideString);
6045 item.type = COLLECTION_TYPE_ARRAY;
6046 item.valueCount = 0;
6047 item.singleLineMode = singleLine;
6048 m_Stack.push_back(item);
6051 void VmaJsonWriter::EndArray()
6053 VMA_ASSERT(!m_InsideString);
6058 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// WriteString: convenience wrapper (body lines dropped — presumably
// BeginString(pStr); EndString(); — TODO confirm).
6062 void VmaJsonWriter::WriteString(
const char* pStr)
// BeginString: starts a quoted string value and sets m_InsideString;
// optionally emits an initial fragment.
6068 void VmaJsonWriter::BeginString(
const char* pStr)
6070 VMA_ASSERT(!m_InsideString);
6074 m_InsideString =
true;
6075 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6077 ContinueString(pStr);
// ContinueString(const char*): appends characters with JSON escaping;
// the per-character switch was dropped, leaving only the assert for
// characters the escaper does not handle.
6081 void VmaJsonWriter::ContinueString(
const char* pStr)
6083 VMA_ASSERT(m_InsideString);
6085 const size_t strLen = strlen(pStr);
6086 for(
size_t i = 0; i < strLen; ++i)
6119 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric / pointer fragments appended inside an open string.
6125 void VmaJsonWriter::ContinueString(uint32_t n)
6127 VMA_ASSERT(m_InsideString);
6131 void VmaJsonWriter::ContinueString(uint64_t n)
6133 VMA_ASSERT(m_InsideString);
6137 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6139 VMA_ASSERT(m_InsideString);
6140 m_SB.AddPointer(ptr);
// EndString: optional final fragment, closing quote (dropped line),
// then clears m_InsideString.
6143 void VmaJsonWriter::EndString(
const char* pStr)
6145 VMA_ASSERT(m_InsideString);
6146 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6148 ContinueString(pStr);
6151 m_InsideString =
false;
// Unquoted scalar values; must not be called while a string is open.
6154 void VmaJsonWriter::WriteNumber(uint32_t n)
6156 VMA_ASSERT(!m_InsideString);
6161 void VmaJsonWriter::WriteNumber(uint64_t n)
6163 VMA_ASSERT(!m_InsideString);
6168 void VmaJsonWriter::WriteBool(
bool b)
6170 VMA_ASSERT(!m_InsideString);
// Emits the JSON literals "true"/"false" (runtime strings — unchanged).
6172 m_SB.Add(b ?
"true" :
"false");
6175 void VmaJsonWriter::WriteNull()
6177 VMA_ASSERT(!m_InsideString);
// BeginValue: called before every value. Inside an object, even
// valueCount means a key is expected (must be a string) and odd means a
// value follows a key (emit ':'); otherwise emit ',' between siblings.
6182 void VmaJsonWriter::BeginValue(
bool isString)
6184 if(!m_Stack.empty())
6186 StackItem& currItem = m_Stack.back();
6187 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6188 currItem.valueCount % 2 == 0)
6190 VMA_ASSERT(isString);
6193 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6194 currItem.valueCount % 2 != 0)
6198 else if(currItem.valueCount > 0)
6207 ++currItem.valueCount;
// WriteIndent: newline + one INDENT per stack level (oneLess used when
// closing a collection); skipped entirely in single-line mode.
6211 void VmaJsonWriter::WriteIndent(
bool oneLess)
6213 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6217 size_t count = m_Stack.size();
6218 if(count > 0 && oneLess)
6222 for(
size_t i = 0; i < count; ++i)
// SetUserData: stores the user pointer on the allocation. When the
// allocation was created with the USER_DATA_COPY_STRING flag
// (IsUserDataString()), pUserData is treated as a NUL-terminated string:
// the old copy is freed and a fresh heap copy is taken through the
// allocator's callbacks; otherwise the raw pointer is stored as-is.
6229 #endif // #if VMA_STATS_STRING_ENABLED 6233 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6235 if(IsUserDataString())
// Guard against being handed the string we are about to free.
6237 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6239 FreeUserDataString(hAllocator);
6241 if(pUserData != VMA_NULL)
6243 const char*
const newStrSrc = (
char*)pUserData;
6244 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the NUL terminator along with the characters.
6245 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6246 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6247 m_pUserData = newStrDst;
// Non-string mode: shallow pointer store, no ownership taken.
6252 m_pUserData = pUserData;
// ChangeBlockAllocation: re-points a block-type allocation at a new
// (block, offset), used during defragmentation. If the allocation is
// persistently mapped, its mapping reference count is transferred:
// unmap the old block, map the new one the same number of times.
6256 void VmaAllocation_T::ChangeBlockAllocation(
6258 VmaDeviceMemoryBlock* block,
6259 VkDeviceSize offset)
6261 VMA_ASSERT(block != VMA_NULL);
6262 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6265 if(block != m_BlockAllocation.m_Block)
// Mask off the persistent-map flag to get the plain map count.
6267 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6268 if(IsPersistentMap())
6270 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6271 block->Map(hAllocator, mapRefCount, VMA_NULL);
6274 m_BlockAllocation.m_Block = block;
6275 m_BlockAllocation.m_Offset = offset;
// Accessors dispatching on the allocation type (block-suballocation vs
// dedicated VkDeviceMemory). The enclosing switch(m_Type) lines were
// dropped in extraction; only the case labels remain visible.
//
// GetOffset: offset within the owning block; dedicated allocations
// presumably return 0 (dropped line — TODO confirm).
6278 VkDeviceSize VmaAllocation_T::GetOffset()
const 6282 case ALLOCATION_TYPE_BLOCK:
6283 return m_BlockAllocation.m_Offset;
6284 case ALLOCATION_TYPE_DEDICATED:
// GetMemory: the VkDeviceMemory handle backing this allocation.
6292 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6296 case ALLOCATION_TYPE_BLOCK:
6297 return m_BlockAllocation.m_Block->GetDeviceMemory();
6298 case ALLOCATION_TYPE_DEDICATED:
6299 return m_DedicatedAllocation.m_hMemory;
6302 return VK_NULL_HANDLE;
// GetMemoryTypeIndex: Vulkan memory type this allocation lives in.
6306 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6310 case ALLOCATION_TYPE_BLOCK:
6311 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6312 case ALLOCATION_TYPE_DEDICATED:
6313 return m_DedicatedAllocation.m_MemoryTypeIndex;
// GetMappedData: CPU pointer for a mapped allocation. For block
// allocations the block's mapping is offset by this suballocation's
// offset; for dedicated ones the stored pointer is returned directly.
6320 void* VmaAllocation_T::GetMappedData()
const 6324 case ALLOCATION_TYPE_BLOCK:
6327 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6328 VMA_ASSERT(pBlockData != VMA_NULL);
6329 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6336 case ALLOCATION_TYPE_DEDICATED:
// Invariant: mapped pointer present exactly when map count nonzero.
6337 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6338 return m_DedicatedAllocation.m_pMappedData;
// CanBecomeLost: only block allocations may opt into the lost-allocation
// mechanism; dedicated allocations cannot.
6345 bool VmaAllocation_T::CanBecomeLost()
const 6349 case ALLOCATION_TYPE_BLOCK:
6350 return m_BlockAllocation.m_CanBecomeLost;
6351 case ALLOCATION_TYPE_DEDICATED:
6359 VmaPool VmaAllocation_T::GetPool()
const 6361 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6362 return m_BlockAllocation.m_hPool;
// MakeLost: CAS loop (loop structure on dropped lines) that marks the
// allocation lost unless it is already lost or was used too recently
// (within frameInUseCount frames of currentFrameIndex).
6365 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6367 VMA_ASSERT(CanBecomeLost());
6373 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6376 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6381 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6387 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for VmaSuballocationType, indexed by the enum
// (initializer entries are on dropped lines). Stats-string builds only.
6397 #if VMA_STATS_STRING_ENABLED 6400 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// PrintParameters: emits this allocation's fields as JSON key/value
// pairs into an already-open object.
6409 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6411 json.WriteString(
"Type");
6412 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6414 json.WriteString(
"Size");
6415 json.WriteNumber(m_Size);
6417 if(m_pUserData != VMA_NULL)
6419 json.WriteString(
"UserData");
// String user data is written verbatim; raw pointers are formatted
// as a pointer value inside the string.
6420 if(IsUserDataString())
6422 json.WriteString((
const char*)m_pUserData);
6427 json.ContinueString_Pointer(m_pUserData);
6432 json.WriteString(
"CreationFrameIndex");
6433 json.WriteNumber(m_CreationFrameIndex);
6435 json.WriteString(
"LastUseFrameIndex");
6436 json.WriteNumber(GetLastUseFrameIndex());
6438 if(m_BufferImageUsage != 0)
6440 json.WriteString(
"Usage");
6441 json.WriteNumber(m_BufferImageUsage);
// FreeUserDataString: releases the heap copy made by SetUserData when
// the allocation is in copy-string mode; +1 matches the copied NUL.
6447 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6449 VMA_ASSERT(IsUserDataString());
6450 if(m_pUserData != VMA_NULL)
6452 char*
const oldStr = (
char*)m_pUserData;
6453 const size_t oldStrLen = strlen(oldStr);
6454 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6455 m_pUserData = VMA_NULL;
// BlockAllocMap/BlockAllocUnmap: maintain this allocation's map
// reference count (low 7 bits of m_MapCount; the high bit flags a
// persistent map). The actual vkMapMemory is done by the owning block.
6459 void VmaAllocation_T::BlockAllocMap()
6461 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// 0x7F = max representable map count; the increment is on a dropped line.
6463 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6469 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
6473 void VmaAllocation_T::BlockAllocUnmap()
6475 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6477 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6483 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap: map a dedicated VkDeviceMemory. If already mapped,
// bump the ref count and return the cached pointer; otherwise call
// vkMapMemory through the dispatch table and cache the result.
6487 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6489 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6493 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6495 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6496 *ppData = m_DedicatedAllocation.m_pMappedData;
6502 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6503 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: whole range (offset/size args on dropped lines).
6508 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6509 hAllocator->m_hDevice,
6510 m_DedicatedAllocation.m_hMemory,
6515 if(result == VK_SUCCESS)
6517 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: decrement ref count; on reaching zero (dropped
// line) clear the cached pointer and call vkUnmapMemory.
6524 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6526 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6528 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6533 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6534 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6535 hAllocator->m_hDevice,
6536 m_DedicatedAllocation.m_hMemory);
6541 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// VmaPrintStatInfo: serializes one VmaStatInfo as a JSON object —
// counts, byte totals, and min/avg/max sub-objects for allocation and
// unused-range sizes. The WriteNumber(stat.*) lines were dropped; only
// the key strings remain visible.
6545 #if VMA_STATS_STRING_ENABLED 6547 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6551 json.WriteString(
"Blocks");
6554 json.WriteString(
"Allocations");
6557 json.WriteString(
"UnusedRanges");
6560 json.WriteString(
"UsedBytes");
6563 json.WriteString(
"UnusedBytes");
6568 json.WriteString(
"AllocationSize");
6569 json.BeginObject(
true);
6570 json.WriteString(
"Min");
6572 json.WriteString(
"Avg");
6574 json.WriteString(
"Max");
6581 json.WriteString(
"UnusedRangeSize");
6582 json.BeginObject(
true);
6583 json.WriteString(
"Min");
6585 json.WriteString(
"Avg");
6587 json.WriteString(
"Max");
// VmaSuballocationItemSizeLess: comparator ordering free-suballocation
// iterators by size; the VkDeviceSize overload enables binary search
// against a plain size key (used with VmaBinaryFindFirstNotLess).
6595 #endif // #if VMA_STATS_STRING_ENABLED 6597 struct VmaSuballocationItemSizeLess
6600 const VmaSuballocationList::iterator lhs,
6601 const VmaSuballocationList::iterator rhs)
const 6603 return lhs->size < rhs->size;
6606 const VmaSuballocationList::iterator lhs,
6607 VkDeviceSize rhsSize)
const 6609 return lhs->size < rhsSize;
// VmaBlockMetadata base-class constructor: captures the allocator's
// allocation callbacks for later container use.
6617 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6619 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// PrintDetailedMap_Begin: writes the block-level summary keys and opens
// the "Suballocations" array that _Allocation/_UnusedRange fill in.
6623 #if VMA_STATS_STRING_ENABLED 6625 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6626 VkDeviceSize unusedBytes,
6627 size_t allocationCount,
6628 size_t unusedRangeCount)
const 6632 json.WriteString(
"TotalBytes");
6633 json.WriteNumber(GetSize());
6635 json.WriteString(
"UnusedBytes");
6636 json.WriteNumber(unusedBytes);
6638 json.WriteString(
"Allocations");
6639 json.WriteNumber((uint64_t)allocationCount);
6641 json.WriteString(
"UnusedRanges");
6642 json.WriteNumber((uint64_t)unusedRangeCount);
6644 json.WriteString(
"Suballocations");
// PrintDetailedMap_Allocation: one single-line JSON object per live
// allocation — offset plus the allocation's own parameters.
6648 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6649 VkDeviceSize offset,
6652 json.BeginObject(
true);
6654 json.WriteString(
"Offset");
6655 json.WriteNumber(offset);
6657 hAllocation->PrintParameters(json);
// PrintDetailedMap_UnusedRange: one single-line object per free range,
// tagged with the FREE suballocation type name.
6662 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6663 VkDeviceSize offset,
6664 VkDeviceSize size)
const 6666 json.BeginObject(
true);
6668 json.WriteString(
"Offset");
6669 json.WriteNumber(offset);
6671 json.WriteString(
"Type");
6672 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6674 json.WriteString(
"Size");
6675 json.WriteNumber(size);
// PrintDetailedMap_End closes the array opened by _Begin (body dropped).
6680 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// VmaBlockMetadata_Generic ctor: both containers (suballocation list and
// size-sorted free list) allocate via the user callbacks. Dtor is empty.
const 6686 #endif // #if VMA_STATS_STRING_ENABLED 6691 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6692 VmaBlockMetadata(hAllocator),
6695 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6696 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6700 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Init: start with a single FREE suballocation spanning the whole block
// and register it in the size-sorted free list (the iterator is moved
// back from end() on a dropped line before being pushed).
6704 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6706 VmaBlockMetadata::Init(size);
6709 m_SumFreeSize = size;
6711 VmaSuballocation suballoc = {};
6712 suballoc.offset = 0;
6713 suballoc.size = size;
6714 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6715 suballoc.hAllocation = VK_NULL_HANDLE;
6717 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6718 m_Suballocations.push_back(suballoc);
6719 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6721 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: debug-only consistency walk. Recomputes offsets, free count
// and free-byte sum from the list and cross-checks them against the
// cached members; also verifies the size-sorted free list.
6724 bool VmaBlockMetadata_Generic::Validate()
const 6726 VMA_VALIDATE(!m_Suballocations.empty());
6729 VkDeviceSize calculatedOffset = 0;
6731 uint32_t calculatedFreeCount = 0;
6733 VkDeviceSize calculatedSumFreeSize = 0;
6736 size_t freeSuballocationsToRegister = 0;
6738 bool prevFree =
false;
6740 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6741 suballocItem != m_Suballocations.cend();
6744 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block contiguously.
6747 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6749 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges must have been merged.
6751 VMA_VALIDATE(!prevFree || !currFree);
6753 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6757 calculatedSumFreeSize += subAlloc.size;
6758 ++calculatedFreeCount;
6759 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6761 ++freeSuballocationsToRegister;
6765 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Taken ranges must agree with their VmaAllocation's view.
6769 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6770 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin every taken range must follow a free one.
6773 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6776 calculatedOffset += subAlloc.size;
6777 prevFree = currFree;
6782 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The free list must be sorted ascending by size.
6784 VkDeviceSize lastSize = 0;
6785 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6787 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6790 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6792 VMA_VALIDATE(suballocItem->size >= lastSize);
6794 lastSize = suballocItem->size;
6798 VMA_VALIDATE(ValidateFreeSuballocationList());
6799 VMA_VALIDATE(calculatedOffset == GetSize());
6800 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6801 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// GetUnusedRangeSizeMax: free list is size-sorted, so the largest free
// range is its last element; 0 when there are no registered free ranges.
6806 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6808 if(!m_FreeSuballocationsBySize.empty())
6810 return m_FreeSuballocationsBySize.back()->size;
// IsEmpty: exactly one suballocation and it is free.
6818 bool VmaBlockMetadata_Generic::IsEmpty()
const 6820 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// CalcAllocationStatInfo: fills a VmaStatInfo from this block's
// suballocation list (per-range accumulation lines were dropped; the
// loop classifies ranges by free vs taken).
6823 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6827 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6839 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6840 suballocItem != m_Suballocations.cend();
6843 const VmaSuballocation& suballoc = *suballocItem;
6844 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// AddPoolStats: accumulates this block's totals into pool-wide stats.
6857 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6859 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6861 inoutStats.
size += GetSize();
// PrintDetailedMap: JSON dump of every suballocation. Allocation count
// passed to _Begin is total ranges minus free ranges.
6868 #if VMA_STATS_STRING_ENABLED 6870 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6872 PrintDetailedMap_Begin(json,
6874 m_Suballocations.size() - (size_t)m_FreeCount,
6878 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6879 suballocItem != m_Suballocations.cend();
6880 ++suballocItem, ++i)
6882 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6884 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6888 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6892 PrintDetailedMap_End(json);
// CreateAllocationRequest: finds a place for a new allocation inside
// this block. Strategy: search the size-sorted free list (best-fit via
// binary search, or worst-fit scanning from the largest — both branches
// call CheckAllocation, whose call headers are on dropped lines). If
// nothing fits and canMakeOtherLost is set, additionally consider
// sacrificing lost-capable allocations, keeping the cheapest candidate
// by CalcCost(). Returns success/failure via bool (return statements on
// dropped lines).
6895 #endif // #if VMA_STATS_STRING_ENABLED 6897 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6898 uint32_t currentFrameIndex,
6899 uint32_t frameInUseCount,
6900 VkDeviceSize bufferImageGranularity,
6901 VkDeviceSize allocSize,
6902 VkDeviceSize allocAlignment,
6904 VmaSuballocationType allocType,
6905 bool canMakeOtherLost,
6907 VmaAllocationRequest* pAllocationRequest)
6909 VMA_ASSERT(allocSize > 0);
6910 VMA_ASSERT(!upperAddress);
6911 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6912 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6913 VMA_HEAVY_ASSERT(Validate());
// Early out: total free space (with margins) cannot possibly fit.
6916 if(canMakeOtherLost ==
false &&
6917 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6923 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6924 if(freeSuballocCount > 0)
// Best-fit: first free range >= allocSize + both debug margins.
6929 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6930 m_FreeSuballocationsBySize.data(),
6931 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6932 allocSize + 2 * VMA_DEBUG_MARGIN,
6933 VmaSuballocationItemSizeLess());
6934 size_t index = it - m_FreeSuballocationsBySize.data();
6935 for(; index < freeSuballocCount; ++index)
6940 bufferImageGranularity,
6944 m_FreeSuballocationsBySize[index],
6946 &pAllocationRequest->offset,
6947 &pAllocationRequest->itemsToMakeLostCount,
6948 &pAllocationRequest->sumFreeSize,
6949 &pAllocationRequest->sumItemSize))
6951 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit fallback: scan all free ranges from largest to smallest.
6959 for(
size_t index = freeSuballocCount; index--; )
6964 bufferImageGranularity,
6968 m_FreeSuballocationsBySize[index],
6970 &pAllocationRequest->offset,
6971 &pAllocationRequest->itemsToMakeLostCount,
6972 &pAllocationRequest->sumFreeSize,
6973 &pAllocationRequest->sumItemSize))
6975 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lost-allocation path: brute-force over every suballocation that is
// free or whose allocation can become lost; track the cheapest request.
6982 if(canMakeOtherLost)
6986 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6987 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6989 VmaAllocationRequest tmpAllocRequest = {};
6990 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6991 suballocIt != m_Suballocations.end();
6994 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6995 suballocIt->hAllocation->CanBecomeLost())
7000 bufferImageGranularity,
7006 &tmpAllocRequest.offset,
7007 &tmpAllocRequest.itemsToMakeLostCount,
7008 &tmpAllocRequest.sumFreeSize,
7009 &tmpAllocRequest.sumItemSize))
7011 tmpAllocRequest.item = suballocIt;
7013 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7016 *pAllocationRequest = tmpAllocRequest;
// Sentinel VK_WHOLE_SIZE in sumItemSize means no candidate was found.
7022 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// MakeRequestedAllocationsLost: commits the "make lost" plan recorded in
// an allocation request — walks forward from request->item, marking the
// required number of lost-capable allocations lost and merging the freed
// ranges. On success the request's item points at a free range ready to
// carve the new allocation from.
7031 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7032 uint32_t currentFrameIndex,
7033 uint32_t frameInUseCount,
7034 VmaAllocationRequest* pAllocationRequest)
7036 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip already-free ranges; only taken ones count toward the quota.
7038 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7040 ++pAllocationRequest->item;
7042 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7043 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7044 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7045 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; use the returned iterator.
7047 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7048 --pAllocationRequest->itemsToMakeLostCount;
7056 VMA_HEAVY_ASSERT(Validate());
7057 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7058 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// MakeAllocationsLost: marks every allocation in this block lost that is
// lost-capable and stale; returns how many were reclaimed.
7063 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7065 uint32_t lostAllocationCount = 0;
7066 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7067 it != m_Suballocations.end();
7070 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7071 it->hAllocation->CanBecomeLost() &&
7072 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7074 it = FreeSuballocation(it);
7075 ++lostAllocationCount;
7078 return lostAllocationCount;
// CheckCorruption: verifies the magic values written into the debug
// margins before and after each taken range are intact (success return
// is on a dropped line).
7081 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7083 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7084 it != m_Suballocations.end();
7087 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7089 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7091 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7092 return VK_ERROR_VALIDATION_FAILED_EXT;
7094 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7096 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7097 return VK_ERROR_VALIDATION_FAILED_EXT;
// Alloc: commits a previously computed allocation request. The chosen
// free suballocation is shrunk to exactly [request.offset, +allocSize)
// and claimed; any leftover space after (paddingEnd) and before
// (paddingBegin) the claimed range is split off as new FREE
// suballocations and re-registered. Free count / free-size caches are
// updated at the end.
7105 void VmaBlockMetadata_Generic::Alloc(
7106 const VmaAllocationRequest& request,
7107 VmaSuballocationType type,
7108 VkDeviceSize allocSize,
7112 VMA_ASSERT(!upperAddress);
7113 VMA_ASSERT(request.item != m_Suballocations.end());
7114 VmaSuballocation& suballoc = *request.item;
7116 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7118 VMA_ASSERT(request.offset >= suballoc.offset);
7119 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7120 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7121 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the size-sorted free list before mutating the range.
7125 UnregisterFreeSuballocation(request.item);
7127 suballoc.offset = request.offset;
7128 suballoc.size = allocSize;
7129 suballoc.type = type;
7130 suballoc.hAllocation = hAllocation;
// Trailing padding becomes its own free range, inserted after the item.
7135 VmaSuballocation paddingSuballoc = {};
7136 paddingSuballoc.offset = request.offset + allocSize;
7137 paddingSuballoc.size = paddingEnd;
7138 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7139 VmaSuballocationList::iterator next = request.item;
7141 const VmaSuballocationList::iterator paddingEndItem =
7142 m_Suballocations.insert(next, paddingSuballoc);
7143 RegisterFreeSuballocation(paddingEndItem);
// Leading padding likewise, inserted before the item.
7149 VmaSuballocation paddingSuballoc = {};
7150 paddingSuballoc.offset = request.offset - paddingBegin;
7151 paddingSuballoc.size = paddingBegin;
7152 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7153 const VmaSuballocationList::iterator paddingBeginItem =
7154 m_Suballocations.insert(request.item, paddingSuballoc);
7155 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; increments for the paddings are on
// dropped lines. Free bytes shrink by exactly the allocated size.
7159 m_FreeCount = m_FreeCount - 1;
7160 if(paddingBegin > 0)
7168 m_SumFreeSize -= allocSize;
// Free: linear search for the suballocation owning this allocation,
// then release it (early return after the hit is on a dropped line).
7171 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7173 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7174 suballocItem != m_Suballocations.end();
7177 VmaSuballocation& suballoc = *suballocItem;
7178 if(suballoc.hAllocation == allocation)
7180 FreeSuballocation(suballocItem);
7181 VMA_HEAVY_ASSERT(Validate());
7185 VMA_ASSERT(0 &&
"Not found!");
// FreeAtOffset: same, keyed by offset instead of allocation handle.
7188 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7190 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7191 suballocItem != m_Suballocations.end();
7194 VmaSuballocation& suballoc = *suballocItem;
7195 if(suballoc.offset == offset)
7197 FreeSuballocation(suballocItem);
7201 VMA_ASSERT(0 &&
"Not found!");
// ValidateFreeSuballocationList: checks every registered free range is
// actually free, big enough to register, and sorted ascending by size.
7204 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7206 VkDeviceSize lastSize = 0;
7207 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7209 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7211 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7212 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7213 VMA_VALIDATE(it->size >= lastSize);
7214 lastSize = it->size;
// CheckAllocation: tests whether an allocation of allocSize/allocAlignment
// can be placed starting at suballocItem. Computes the resulting *pOffset
// (after debug margin, alignment, and bufferImageGranularity bumps) and,
// in canMakeOtherLost mode, how many following allocations would have to
// be made lost (*itemsToMakeLostCount) plus free/taken byte sums used for
// cost comparison. Returns false when the placement is impossible
// (return statements sit on dropped lines). Two largely parallel paths:
// the lost-capable path first, then the plain free-range path.
7219 bool VmaBlockMetadata_Generic::CheckAllocation(
7220 uint32_t currentFrameIndex,
7221 uint32_t frameInUseCount,
7222 VkDeviceSize bufferImageGranularity,
7223 VkDeviceSize allocSize,
7224 VkDeviceSize allocAlignment,
7225 VmaSuballocationType allocType,
7226 VmaSuballocationList::const_iterator suballocItem,
7227 bool canMakeOtherLost,
7228 VkDeviceSize* pOffset,
7229 size_t* itemsToMakeLostCount,
7230 VkDeviceSize* pSumFreeSize,
7231 VkDeviceSize* pSumItemSize)
const 7233 VMA_ASSERT(allocSize > 0);
7234 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7235 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7236 VMA_ASSERT(pOffset != VMA_NULL);
7238 *itemsToMakeLostCount = 0;
// --- Path 1: starting range may be taken but lost-capable. ---
7242 if(canMakeOtherLost)
7244 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7246 *pSumFreeSize = suballocItem->size;
// A taken start range only works if its allocation is stale enough
// to be made lost.
7250 if(suballocItem->hAllocation->CanBecomeLost() &&
7251 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7253 ++*itemsToMakeLostCount;
7254 *pSumItemSize = suballocItem->size;
// Not enough room left in the block from this offset.
7263 if(GetSize() - suballocItem->offset < allocSize)
7269 *pOffset = suballocItem->offset;
7272 if(VMA_DEBUG_MARGIN > 0)
7274 *pOffset += VMA_DEBUG_MARGIN;
7278 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity: if a conflicting-type neighbor before us
// shares a "page", bump the offset up to a granularity boundary.
7282 if(bufferImageGranularity > 1)
7284 bool bufferImageGranularityConflict =
false;
7285 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7286 while(prevSuballocItem != m_Suballocations.cbegin())
7289 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7290 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7292 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7294 bufferImageGranularityConflict =
true;
7302 if(bufferImageGranularityConflict)
7304 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment bumps may push us past the whole starting range.
7310 if(*pOffset >= suballocItem->offset + suballocItem->size)
7316 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7319 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7321 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7323 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over additional ranges until totalSize is covered,
// counting lost-capable allocations we would have to sacrifice.
7330 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7331 if(totalSize > suballocItem->size)
7333 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7334 while(remainingSize > 0)
7337 if(lastSuballocItem == m_Suballocations.cend())
7341 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7343 *pSumFreeSize += lastSuballocItem->size;
7347 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7348 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7349 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7351 ++*itemsToMakeLostCount;
7352 *pSumItemSize += lastSuballocItem->size;
7359 remainingSize = (lastSuballocItem->size < remainingSize) ?
7360 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following ranges on our last page;
// conflicting neighbors there must also be made lost.
7366 if(bufferImageGranularity > 1)
7368 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7370 while(nextSuballocItem != m_Suballocations.cend())
7372 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7373 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7375 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7377 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7378 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7379 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7381 ++*itemsToMakeLostCount;
// --- Path 2: plain placement inside a single free range. ---
7400 const VmaSuballocation& suballoc = *suballocItem;
7401 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7403 *pSumFreeSize = suballoc.size;
7406 if(suballoc.size < allocSize)
7412 *pOffset = suballoc.offset;
7415 if(VMA_DEBUG_MARGIN > 0)
7417 *pOffset += VMA_DEBUG_MARGIN;
7421 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same granularity bump against preceding neighbors as in path 1.
7425 if(bufferImageGranularity > 1)
7427 bool bufferImageGranularityConflict =
false;
7428 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7429 while(prevSuballocItem != m_Suballocations.cbegin())
7432 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7433 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7435 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7437 bufferImageGranularityConflict =
true;
7445 if(bufferImageGranularityConflict)
7447 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7452 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7455 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fit check: padding + allocation + end margin must fit this range.
7458 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Finally reject if a following range on the same page conflicts.
7465 if(bufferImageGranularity > 1)
7467 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7469 while(nextSuballocItem != m_Suballocations.cend())
7471 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7472 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7474 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// MergeFreeWithNext: coalesces a free range with the free range
// immediately after it (nextItem is advanced on a dropped line);
// absorbs its size and erases the second node.
7493 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7495 VMA_ASSERT(item != m_Suballocations.end());
7496 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7498 VmaSuballocationList::iterator nextItem = item;
7500 VMA_ASSERT(nextItem != m_Suballocations.end());
7501 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7503 item->size += nextItem->size;
7505 m_Suballocations.erase(nextItem);
// FreeSuballocation: marks a taken range free, updates cached totals,
// merges with free neighbors on either side, and (re)registers the
// resulting range in the size-sorted free list. Returns an iterator to
// the merged range (which may differ from the input after a prev-merge).
7508 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7511 VmaSuballocation& suballoc = *suballocItem;
7512 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7513 suballoc.hAllocation = VK_NULL_HANDLE;
7517 m_SumFreeSize += suballoc.size;
7520 bool mergeWithNext =
false;
7521 bool mergeWithPrev =
false;
7523 VmaSuballocationList::iterator nextItem = suballocItem;
7525 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7527 mergeWithNext =
true;
7530 VmaSuballocationList::iterator prevItem = suballocItem;
7531 if(suballocItem != m_Suballocations.begin())
7534 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7536 mergeWithPrev =
true;
// Neighbors must leave the sorted free list before their size changes.
7542 UnregisterFreeSuballocation(nextItem);
7543 MergeFreeWithNext(suballocItem);
7548 UnregisterFreeSuballocation(prevItem);
7549 MergeFreeWithNext(prevItem);
7550 RegisterFreeSuballocation(prevItem);
7555 RegisterFreeSuballocation(suballocItem);
7556 return suballocItem;
// RegisterFreeSuballocation: adds a free range to the size-sorted free
// list, but only if it meets the minimum registration size — tiny
// fragments are tracked in the main list only.
7560 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7562 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7563 VMA_ASSERT(item->size > 0);
7567 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7569 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7571 if(m_FreeSuballocationsBySize.empty())
7573 m_FreeSuballocationsBySize.push_back(item);
// Sorted insert keeps the vector ordered by size for binary search.
7577 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// UnregisterFreeSuballocation: removes a registered free range. Binary
// search finds the first entry of equal size; a linear scan over the
// equal-size run then locates the exact iterator.
7585 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7587 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7588 VMA_ASSERT(item->size > 0);
7592 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7594 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7596 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7597 m_FreeSuballocationsBySize.data(),
7598 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7600 VmaSuballocationItemSizeLess());
7601 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7602 index < m_FreeSuballocationsBySize.size();
7605 if(m_FreeSuballocationsBySize[index] == item)
7607 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Once sizes stop matching, the item cannot be further along.
7610 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7612 VMA_ASSERT(0 &&
"Not found.");
// VmaBlockMetadata_Linear constructor: linear/ring-buffer metadata keeps
// two suballocation vectors (double-buffered via m_1stVectorIndex) plus
// counters of null (freed-but-not-compacted) items; the second vector
// starts out unused (SECOND_VECTOR_EMPTY).
7621 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7622 VmaBlockMetadata(hAllocator),
7624 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7625 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7626 m_1stVectorIndex(0),
7627 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7628 m_1stNullItemsBeginCount(0),
7629 m_1stNullItemsMiddleCount(0),
7630 m_2ndNullItemsCount(0)
7634 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Init: an empty linear block is entirely free.
7638 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7640 VmaBlockMetadata::Init(size);
7641 m_SumFreeSize = size;
// Validates internal consistency of the linear metadata; each VMA_VALIDATE
// returns false on the first violated invariant. Walks both suballocation
// vectors accumulating a running offset and used-size total, and finally
// cross-checks them against GetSize() and the cached m_SumFreeSize.
7644 bool VmaBlockMetadata_Linear::Validate()
const 7646 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7647 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Mode/emptiness invariants: 2nd vector is non-empty iff a mode is active;
// ring-buffer mode requires a non-empty 1st vector when 2nd is non-empty.
7649 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7650 VMA_VALIDATE(!suballocations1st.empty() ||
7651 suballocations2nd.empty() ||
7652 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7654 if(!suballocations1st.empty())
// First non-null item and the last item of 1st must be live allocations.
7657 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7659 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7661 if(!suballocations2nd.empty())
7664 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed the vectors they describe.
7667 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7668 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7670 VkDeviceSize sumUsedSize = 0;
7671 const size_t suballoc1stCount = suballocations1st.size();
7672 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd-vector suballocations occupy the low addresses, so
// they are scanned first with the running offset.
7674 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7676 const size_t suballoc2ndCount = suballocations2nd.size();
7677 size_t nullItem2ndCount = 0;
7678 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7680 const VmaSuballocation& suballoc = suballocations2nd[i];
7681 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// FREE type and null handle must agree; offsets must be non-decreasing.
7683 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7684 VMA_VALIDATE(suballoc.offset >= offset);
// Live allocation's own bookkeeping must match this metadata entry.
7688 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7689 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7690 sumUsedSize += suballoc.size;
7697 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7700 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be fully freed entries.
7703 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7705 const VmaSuballocation& suballoc = suballocations1st[i];
7706 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7707 suballoc.hAllocation == VK_NULL_HANDLE);
7710 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Remaining 1st-vector entries, starting after the leading null run.
7712 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7714 const VmaSuballocation& suballoc = suballocations1st[i];
7715 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7717 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7718 VMA_VALIDATE(suballoc.offset >= offset);
7719 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7723 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7724 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7725 sumUsedSize += suballoc.size;
7732 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7734 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end of the block, so
// it is iterated in reverse to keep the running offset increasing.
7736 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7738 const size_t suballoc2ndCount = suballocations2nd.size();
7739 size_t nullItem2ndCount = 0;
7740 for(
size_t i = suballoc2ndCount; i--; )
7742 const VmaSuballocation& suballoc = suballocations2nd[i];
7743 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7745 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7746 VMA_VALIDATE(suballoc.offset >= offset);
7750 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7751 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7752 sumUsedSize += suballoc.size;
7759 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7762 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Totals: the running offset may not exceed the block, and the cached free
// size must equal block size minus the bytes in live allocations.
7765 VMA_VALIDATE(offset <= GetSize());
7766 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7771 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7773 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7774 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the largest contiguous free range usable by the linear allocator,
// depending on which mode the 2nd vector is in. Gaps left by freed items
// inside the vectors are not counted — only the space where new allocations
// can actually be placed.
7777 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7779 const VkDeviceSize size = GetSize();
7791 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7793 switch(m_2ndVectorMode)
// Only 1st vector in use: free space lies before its first live item and
// after its last item.
7795 case SECOND_VECTOR_EMPTY:
7801 const size_t suballocations1stCount = suballocations1st.size();
7802 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7803 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7804 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// The larger of: space before the first live 1st-vector item, and space
// after the last one (part of a max-of-two expression; its opening line is
// not visible in this excerpt).
7806 firstSuballoc.offset,
7807 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: free space is the gap between the end of the 2nd vector
// (low addresses) and the start of the 1st vector.
7811 case SECOND_VECTOR_RING_BUFFER:
7816 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7817 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7818 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7819 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: free space is the gap between the top of the lower (1st)
// stack and the top of the upper (2nd) stack growing down from the end.
7823 case SECOND_VECTOR_DOUBLE_STACK:
7828 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7829 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7830 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7831 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics into outInfo by sweeping the block from
// offset 0 to size with a running cursor (lastOffset), visiting live
// allocations in address order: first the ring-buffer part of the 2nd vector
// (low addresses), then the 1st vector, then the double-stack part of the
// 2nd vector (high addresses, iterated backward). Null (freed) entries are
// skipped; the stat-update statements themselves are not visible in this
// excerpt (elided lines).
7841 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7843 const VkDeviceSize size = GetSize();
7844 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7845 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7846 const size_t suballoc1stCount = suballocations1st.size();
7847 const size_t suballoc2ndCount = suballocations2nd.size();
7858 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer 2nd vector, which occupies [0, start of 1st vector).
7860 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7862 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7863 size_t nextAlloc2ndIndex = 0;
7864 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries.
7867 while(nextAlloc2ndIndex < suballoc2ndCount &&
7868 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7870 ++nextAlloc2ndIndex;
7874 if(nextAlloc2ndIndex < suballoc2ndCount)
7876 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Free gap before this allocation.
7879 if(lastOffset < suballoc.offset)
7882 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7896 lastOffset = suballoc.offset + suballoc.size;
7897 ++nextAlloc2ndIndex;
// No more live entries: account trailing free space, then end this pass.
7903 if(lastOffset < freeSpace2ndTo1stEnd)
7905 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7913 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, up to the bottom of the upper stack (or block end).
7918 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7919 const VkDeviceSize freeSpace1stTo2ndEnd =
7920 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7921 while(lastOffset < freeSpace1stTo2ndEnd)
7924 while(nextAlloc1stIndex < suballoc1stCount &&
7925 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7927 ++nextAlloc1stIndex;
7931 if(nextAlloc1stIndex < suballoc1stCount)
7933 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7936 if(lastOffset < suballoc.offset)
7939 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7953 lastOffset = suballoc.offset + suballoc.size;
7954 ++nextAlloc1stIndex;
7960 if(lastOffset < freeSpace1stTo2ndEnd)
7962 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7970 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack 2nd vector, iterated backward so addresses ascend;
// the cursor wraps to SIZE_MAX when it runs off the front.
7974 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7976 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7977 while(lastOffset < size)
7980 while(nextAlloc2ndIndex != SIZE_MAX &&
7981 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7983 --nextAlloc2ndIndex;
7987 if(nextAlloc2ndIndex != SIZE_MAX)
7989 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7992 if(lastOffset < suballoc.offset)
7995 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8009 lastOffset = suballoc.offset + suballoc.size;
8010 --nextAlloc2ndIndex;
8016 if(lastOffset < size)
8018 const VkDeviceSize unusedRangeSize = size - lastOffset;
8034 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8036 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8037 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8038 const VkDeviceSize size = GetSize();
8039 const size_t suballoc1stCount = suballocations1st.size();
8040 const size_t suballoc2ndCount = suballocations2nd.size();
8042 inoutStats.
size += size;
8044 VkDeviceSize lastOffset = 0;
8046 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8048 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8049 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8050 while(lastOffset < freeSpace2ndTo1stEnd)
8053 while(nextAlloc2ndIndex < suballoc2ndCount &&
8054 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8056 ++nextAlloc2ndIndex;
8060 if(nextAlloc2ndIndex < suballoc2ndCount)
8062 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8065 if(lastOffset < suballoc.offset)
8068 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8079 lastOffset = suballoc.offset + suballoc.size;
8080 ++nextAlloc2ndIndex;
8085 if(lastOffset < freeSpace2ndTo1stEnd)
8088 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8095 lastOffset = freeSpace2ndTo1stEnd;
8100 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8101 const VkDeviceSize freeSpace1stTo2ndEnd =
8102 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8103 while(lastOffset < freeSpace1stTo2ndEnd)
8106 while(nextAlloc1stIndex < suballoc1stCount &&
8107 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8109 ++nextAlloc1stIndex;
8113 if(nextAlloc1stIndex < suballoc1stCount)
8115 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8118 if(lastOffset < suballoc.offset)
8121 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8132 lastOffset = suballoc.offset + suballoc.size;
8133 ++nextAlloc1stIndex;
8138 if(lastOffset < freeSpace1stTo2ndEnd)
8141 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8148 lastOffset = freeSpace1stTo2ndEnd;
8152 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8154 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8155 while(lastOffset < size)
8158 while(nextAlloc2ndIndex != SIZE_MAX &&
8159 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8161 --nextAlloc2ndIndex;
8165 if(nextAlloc2ndIndex != SIZE_MAX)
8167 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8170 if(lastOffset < suballoc.offset)
8173 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8184 lastOffset = suballoc.offset + suballoc.size;
8185 --nextAlloc2ndIndex;
8190 if(lastOffset < size)
8193 const VkDeviceSize unusedRangeSize = size - lastOffset;
8206 #if VMA_STATS_STRING_ENABLED 8207 void VmaBlockMetadata_Linear::PrintDetailedMap(
// Writes a detailed JSON map of this block. Two sweeps over the same
// address-ordered traversal (ring-buffer 2nd vector, then 1st vector, then
// double-stack 2nd vector): the first sweep only counts allocations and
// unused ranges for PrintDetailedMap_Begin; the second emits each range.
class VmaJsonWriter& json)
const 8209 const VkDeviceSize size = GetSize();
8210 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8211 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8212 const size_t suballoc1stCount = suballocations1st.size();
8213 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count allocations, unused ranges, and used bytes.
8217 size_t unusedRangeCount = 0;
8218 VkDeviceSize usedBytes = 0;
8220 VkDeviceSize lastOffset = 0;
8222 size_t alloc2ndCount = 0;
8223 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8225 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8226 size_t nextAlloc2ndIndex = 0;
8227 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries.
8230 while(nextAlloc2ndIndex < suballoc2ndCount &&
8231 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8233 ++nextAlloc2ndIndex;
8237 if(nextAlloc2ndIndex < suballoc2ndCount)
8239 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8242 if(lastOffset < suballoc.offset)
8251 usedBytes += suballoc.size;
8254 lastOffset = suballoc.offset + suballoc.size;
8255 ++nextAlloc2ndIndex;
8260 if(lastOffset < freeSpace2ndTo1stEnd)
8267 lastOffset = freeSpace2ndTo1stEnd;
8272 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8273 size_t alloc1stCount = 0;
8274 const VkDeviceSize freeSpace1stTo2ndEnd =
8275 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8276 while(lastOffset < freeSpace1stTo2ndEnd)
8279 while(nextAlloc1stIndex < suballoc1stCount &&
8280 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8282 ++nextAlloc1stIndex;
8286 if(nextAlloc1stIndex < suballoc1stCount)
8288 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8291 if(lastOffset < suballoc.offset)
8300 usedBytes += suballoc.size;
8303 lastOffset = suballoc.offset + suballoc.size;
8304 ++nextAlloc1stIndex;
// NOTE(review): this end-of-pass check compares against `size`, while the
// equivalent branch in the second pass (and the loop bound above) uses
// freeSpace1stTo2ndEnd — in double-stack mode this can count one extra
// unused range that the second pass never prints; confirm against upstream.
8309 if(lastOffset < size)
8316 lastOffset = freeSpace1stTo2ndEnd;
8320 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8322 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8323 while(lastOffset < size)
8326 while(nextAlloc2ndIndex != SIZE_MAX &&
8327 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8329 --nextAlloc2ndIndex;
8333 if(nextAlloc2ndIndex != SIZE_MAX)
8335 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8338 if(lastOffset < suballoc.offset)
8347 usedBytes += suballoc.size;
8350 lastOffset = suballoc.offset + suballoc.size;
8351 --nextAlloc2ndIndex;
8356 if(lastOffset < size)
// SECOND PASS: emit the JSON, repeating the same traversal.
8368 const VkDeviceSize unusedBytes = size - usedBytes;
8369 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8374 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8376 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8377 size_t nextAlloc2ndIndex = 0;
8378 while(lastOffset < freeSpace2ndTo1stEnd)
8381 while(nextAlloc2ndIndex < suballoc2ndCount &&
8382 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8384 ++nextAlloc2ndIndex;
8388 if(nextAlloc2ndIndex < suballoc2ndCount)
8390 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8393 if(lastOffset < suballoc.offset)
8396 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8397 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8402 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8405 lastOffset = suballoc.offset + suballoc.size;
8406 ++nextAlloc2ndIndex;
8411 if(lastOffset < freeSpace2ndTo1stEnd)
8414 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8415 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8419 lastOffset = freeSpace2ndTo1stEnd;
8424 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8425 while(lastOffset < freeSpace1stTo2ndEnd)
8428 while(nextAlloc1stIndex < suballoc1stCount &&
8429 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8431 ++nextAlloc1stIndex;
8435 if(nextAlloc1stIndex < suballoc1stCount)
8437 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8440 if(lastOffset < suballoc.offset)
8443 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8444 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8449 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8452 lastOffset = suballoc.offset + suballoc.size;
8453 ++nextAlloc1stIndex;
8458 if(lastOffset < freeSpace1stTo2ndEnd)
8461 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8462 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8466 lastOffset = freeSpace1stTo2ndEnd;
8470 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8472 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8473 while(lastOffset < size)
8476 while(nextAlloc2ndIndex != SIZE_MAX &&
8477 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8479 --nextAlloc2ndIndex;
8483 if(nextAlloc2ndIndex != SIZE_MAX)
8485 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8488 if(lastOffset < suballoc.offset)
8491 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8492 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8497 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8500 lastOffset = suballoc.offset + suballoc.size;
8501 --nextAlloc2ndIndex;
8506 if(lastOffset < size)
8509 const VkDeviceSize unusedRangeSize = size - lastOffset;
8510 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8519 PrintDetailedMap_End(json);
8521 #endif // #if VMA_STATS_STRING_ENABLED 8523 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
// Tries to find a place for a new allocation of allocSize/allocAlignment in
// this linear block, filling *pAllocationRequest on success. Three layout
// strategies are visible below: (a) upper stack (growing down from the block
// end) when the 2nd vector is in double-stack mode or empty, (b) appending to
// the end of the 1st vector, (c) wrapping around to low addresses as a ring
// buffer, optionally making existing allocations lost (canMakeOtherLost).
8524 uint32_t currentFrameIndex,
8525 uint32_t frameInUseCount,
8526 VkDeviceSize bufferImageGranularity,
8527 VkDeviceSize allocSize,
8528 VkDeviceSize allocAlignment,
8530 VmaSuballocationType allocType,
8531 bool canMakeOtherLost,
8533 VmaAllocationRequest* pAllocationRequest)
8535 VMA_ASSERT(allocSize > 0);
8536 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8537 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8538 VMA_HEAVY_ASSERT(Validate());
8540 const VkDeviceSize size = GetSize();
8541 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8542 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-stack placement is incompatible with ring-buffer mode.
8546 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8548 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8553 if(allocSize > size)
// (a) Upper stack: start from the block end (or below the current top of the
// 2nd stack) and move down.
8557 VkDeviceSize resultBaseOffset = size - allocSize;
8558 if(!suballocations2nd.empty())
8560 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8561 resultBaseOffset = lastSuballoc.offset - allocSize;
8562 if(allocSize > lastSuballoc.offset)
8569 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin below the candidate (stack grows down, so subtract).
8572 if(VMA_DEBUG_MARGIN > 0)
8574 if(resultOffset < VMA_DEBUG_MARGIN)
8578 resultOffset -= VMA_DEBUG_MARGIN;
8582 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check bufferImageGranularity conflicts against 2nd-stack neighbors above;
// if conflicting, align further down to a granularity boundary.
8586 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8588 bool bufferImageGranularityConflict =
false;
8589 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8591 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8592 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8594 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8596 bufferImageGranularityConflict =
true;
8604 if(bufferImageGranularityConflict)
8606 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity)；
8611 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8612 suballocations1st.back().offset + suballocations1st.back().size :
// Fits between end of the 1st vector and the candidate offset?
8614 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts with the 1st vector below the candidate.
8618 if(bufferImageGranularity > 1)
8620 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8622 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8623 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8625 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success for the upper-stack path.
8639 pAllocationRequest->offset = resultOffset;
8640 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8641 pAllocationRequest->sumItemSize = 0;
8643 pAllocationRequest->itemsToMakeLostCount = 0;
// (b) Append at the end of the 1st vector (2nd vector empty or double stack).
8649 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8653 VkDeviceSize resultBaseOffset = 0;
8654 if(!suballocations1st.empty())
8656 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8657 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8661 VkDeviceSize resultOffset = resultBaseOffset;
8664 if(VMA_DEBUG_MARGIN > 0)
8666 resultOffset += VMA_DEBUG_MARGIN;
8670 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts with preceding 1st-vector entries.
8674 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8676 bool bufferImageGranularityConflict =
false;
8677 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8679 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8680 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8682 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8684 bufferImageGranularityConflict =
true;
8692 if(bufferImageGranularityConflict)
8694 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack, or at the block end.
8698 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8699 suballocations2nd.back().offset : size;
8702 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity conflicts with upper-stack entries above the candidate.
8706 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8708 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8710 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8711 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8713 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success for the append path.
8727 pAllocationRequest->offset = resultOffset;
8728 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8729 pAllocationRequest->sumItemSize = 0;
8731 pAllocationRequest->itemsToMakeLostCount = 0;
// (c) Wrap around before the 1st vector as a ring buffer, possibly making
// old allocations lost to create room.
8738 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8740 VMA_ASSERT(!suballocations1st.empty());
8742 VkDeviceSize resultBaseOffset = 0;
8743 if(!suballocations2nd.empty())
8745 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8746 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8750 VkDeviceSize resultOffset = resultBaseOffset;
8753 if(VMA_DEBUG_MARGIN > 0)
8755 resultOffset += VMA_DEBUG_MARGIN;
8759 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8763 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8765 bool bufferImageGranularityConflict =
false;
8766 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8768 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8769 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8771 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8773 bufferImageGranularityConflict =
true;
8781 if(bufferImageGranularityConflict)
8783 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8787 pAllocationRequest->itemsToMakeLostCount = 0;
8788 pAllocationRequest->sumItemSize = 0;
8789 size_t index1st = m_1stNullItemsBeginCount;
// Count how many 1st-vector allocations overlapping the candidate range can
// be made lost (must be lost-capable and old enough per frameInUseCount).
8791 if(canMakeOtherLost)
8793 while(index1st < suballocations1st.size() &&
8794 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8797 const VmaSuballocation& suballoc = suballocations1st[index1st];
8798 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8804 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8805 if(suballoc.hAllocation->CanBecomeLost() &&
8806 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8808 ++pAllocationRequest->itemsToMakeLostCount;
8809 pAllocationRequest->sumItemSize += suballoc.size;
// Additional entries on the same granularity page must also be made lost.
8821 if(bufferImageGranularity > 1)
8823 while(index1st < suballocations1st.size())
8825 const VmaSuballocation& suballoc = suballocations1st[index1st];
8826 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8828 if(suballoc.hAllocation != VK_NULL_HANDLE)
8831 if(suballoc.hAllocation->CanBecomeLost() &&
8832 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8834 ++pAllocationRequest->itemsToMakeLostCount;
8835 pAllocationRequest->sumItemSize += suballoc.size;
// The request succeeds if the candidate range ends before the block end (no
// survivors ahead) or before the first surviving 1st-vector entry.
8854 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8855 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
8859 if(bufferImageGranularity > 1)
8861 for(
size_t nextSuballocIndex = index1st;
8862 nextSuballocIndex < suballocations1st.size();
8863 nextSuballocIndex++)
8865 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8866 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8868 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success for the ring-buffer path: free size is the space up to the first
// surviving entry (or block end), minus the bytes to be sacrificed.
8882 pAllocationRequest->offset = resultOffset;
8883 pAllocationRequest->sumFreeSize =
8884 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8886 - pAllocationRequest->sumItemSize;
// Makes lost the allocations that CreateAllocationRequest marked as
// sacrificial (itemsToMakeLostCount). Walks the 1st vector from the first
// non-null item, converting lost-capable allocations to free entries and
// updating the free-size/null-item bookkeeping.
8896 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8897 uint32_t currentFrameIndex,
8898 uint32_t frameInUseCount,
8899 VmaAllocationRequest* pAllocationRequest)
// Nothing to sacrifice: trivially successful.
8901 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Only the ring-buffer path produces items to make lost.
8906 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8908 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8909 size_t index1st = m_1stNullItemsBeginCount;
8910 size_t madeLostCount = 0;
8911 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8913 VMA_ASSERT(index1st < suballocations1st.size());
8914 VmaSuballocation& suballoc = suballocations1st[index1st];
8915 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8917 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8918 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8919 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the entry to a freed (null) item and update caches.
8921 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8922 suballoc.hAllocation = VK_NULL_HANDLE;
8923 m_SumFreeSize += suballoc.size;
8924 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in this block that can become lost and is old
// enough (last use + frameInUseCount < currentFrameIndex), in both vectors.
// Returns the number of allocations made lost.
8941 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8943 uint32_t lostAllocationCount = 0;
// 1st vector: only entries after the leading null run can be live.
8945 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8946 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8948 VmaSuballocation& suballoc = suballocations1st[i];
8949 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8950 suballoc.hAllocation->CanBecomeLost() &&
8951 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8953 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8954 suballoc.hAllocation = VK_NULL_HANDLE;
8955 ++m_1stNullItemsMiddleCount;
8956 m_SumFreeSize += suballoc.size;
8957 ++lostAllocationCount;
// 2nd vector: same conversion, tracked by m_2ndNullItemsCount.
// NOTE(review): unlike the 1st-vector loop above, no m_SumFreeSize update is
// visible here — it may be on an elided line; confirm against upstream.
8961 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8962 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8964 VmaSuballocation& suballoc = suballocations2nd[i];
8965 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8966 suballoc.hAllocation->CanBecomeLost() &&
8967 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8969 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8970 suballoc.hAllocation = VK_NULL_HANDLE;
8971 ++m_2ndNullItemsCount;
8972 ++lostAllocationCount;
8976 if(lostAllocationCount)
8981 return lostAllocationCount;
// Validates the magic-value guard bytes written VMA_DEBUG_MARGIN before and
// immediately after every live allocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard found.
8984 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8986 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8987 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8989 const VmaSuballocation& suballoc = suballocations1st[i];
8990 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard bytes live in the debug margin preceding the allocation...
8992 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8994 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8995 return VK_ERROR_VALIDATION_FAILED_EXT;
// ...and directly after its last byte.
8997 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8999 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9000 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
9005 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9006 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9008 const VmaSuballocation& suballoc = suballocations2nd[i];
9009 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9011 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9013 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9014 return VK_ERROR_VALIDATION_FAILED_EXT;
9016 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9018 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9019 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: inserts the new
// suballocation either into the 2nd vector (upper stack / ring buffer) or at
// the end of the 1st vector, switching m_2ndVectorMode as needed, and
// updates m_SumFreeSize.
9027 void VmaBlockMetadata_Linear::Alloc(
9028 const VmaAllocationRequest& request,
9029 VmaSuballocationType type,
9030 VkDeviceSize allocSize,
9034 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-stack placement: pushing onto 2nd vector switches to (or stays in)
// double-stack mode; ring-buffer mode would be a usage error.
9038 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9039 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9040 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9041 suballocations2nd.push_back(newSuballoc);
9042 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9046 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in this block.
9049 if(suballocations1st.empty())
9051 suballocations1st.push_back(newSuballoc);
// Appending past the current end of the 1st vector.
9056 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9059 VMA_ASSERT(request.offset + allocSize <= GetSize());
9060 suballocations1st.push_back(newSuballoc);
// Wrapping before the first 1st-vector item: ring-buffer placement in 2nd.
9063 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9065 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9067 switch(m_2ndVectorMode)
9069 case SECOND_VECTOR_EMPTY:
// First wrapped allocation: enter ring-buffer mode.
9071 VMA_ASSERT(suballocations2nd.empty());
9072 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9074 case SECOND_VECTOR_RING_BUFFER:
9076 VMA_ASSERT(!suballocations2nd.empty());
9078 case SECOND_VECTOR_DOUBLE_STACK:
9079 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9085 suballocations2nd.push_back(newSuballoc);
// Offset matches neither valid placement: the request is inconsistent.
9089 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9094 m_SumFreeSize -= newSuballoc.size;
9097 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9099 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths handle the first
// item of the 1st vector and the most recently pushed item of either vector;
// otherwise the entry is located by binary search and marked as a null item
// (to be compacted later). Asserts if the offset is not found.
9102 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9104 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9105 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9107 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector just extends the
// leading null run.
9110 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9111 if(firstSuballoc.offset == offset)
9113 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9114 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9115 m_SumFreeSize += firstSuballoc.size;
9116 ++m_1stNullItemsBeginCount;
// Fast path: freeing the top of the 2nd vector (ring buffer or upper stack)
// can simply pop it.
9123 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9124 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9126 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9127 if(lastSuballoc.offset == offset)
9129 m_SumFreeSize += lastSuballoc.size;
9130 suballocations2nd.pop_back();
// Fast path: freeing the last item of the 1st vector when 2nd is unused.
9136 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9138 VmaSuballocation& lastSuballoc = suballocations1st.back();
9139 if(lastSuballoc.offset == offset)
9141 m_SumFreeSize += lastSuballoc.size;
9142 suballocations1st.pop_back();
// Slow path: binary search the 1st vector (sorted ascending by offset).
9150 VmaSuballocation refSuballoc;
9151 refSuballoc.offset = offset;
9153 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9154 suballocations1st.begin() + m_1stNullItemsBeginCount,
9155 suballocations1st.end(),
9157 if(it != suballocations1st.end())
9159 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9160 it->hAllocation = VK_NULL_HANDLE;
9161 ++m_1stNullItemsMiddleCount;
9162 m_SumFreeSize += it->size;
// Slow path for the 2nd vector: sorted ascending in ring-buffer mode,
// descending in double-stack mode, hence the two comparators.
9168 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9171 VmaSuballocation refSuballoc;
9172 refSuballoc.offset = offset;
9174 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9175 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9176 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9177 if(it != suballocations2nd.end())
9179 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9180 it->hAllocation = VK_NULL_HANDLE;
9181 ++m_2ndNullItemsCount;
9182 m_SumFreeSize += it->size;
9188 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: decide whether the 1st suballocation vector should be compacted
// (null items physically removed). Compaction pays off once the vector has
// more than 32 elements and null items outnumber live items by at least 3:2
// (nullCount * 2 >= liveCount * 3).
9191 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9193 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9194 const size_t suballocCount = AccessSuballocations1st().size();
9195 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping run after a free: trims null items from the edges of both
// suballocation vectors, optionally compacts the 1st vector, and when the 1st
// vector drains completely, promotes the 2nd vector to become the new 1st
// (by flipping m_1stVectorIndex).
9198 void VmaBlockMetadata_Linear::CleanupAfterFree()
9200 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9201 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path: block is completely empty — reset all bookkeeping.
9205 suballocations1st.clear();
9206 suballocations2nd.clear();
9207 m_1stNullItemsBeginCount = 0;
9208 m_1stNullItemsMiddleCount = 0;
9209 m_2ndNullItemsCount = 0;
9210 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9214 const size_t suballoc1stCount = suballocations1st.size();
9215 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9216 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Absorb null items at the beginning of 1st vector into the "begin" count.
9219 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9220 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9222 ++m_1stNullItemsBeginCount;
9223 --m_1stNullItemsMiddleCount;
// Pop null items from the end of 1st vector.
9227 while(m_1stNullItemsMiddleCount > 0 &&
9228 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9230 --m_1stNullItemsMiddleCount;
9231 suballocations1st.pop_back();
// Pop null items from the end of 2nd vector.
9235 while(m_2ndNullItemsCount > 0 &&
9236 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9238 --m_2ndNullItemsCount;
9239 suballocations2nd.pop_back();
// Compact 1st vector in place: shift live items to the front, drop nulls.
9242 if(ShouldCompact1st())
9244 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9245 size_t srcIndex = m_1stNullItemsBeginCount;
9246 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9248 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9252 if(dstIndex != srcIndex)
9254 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9258 suballocations1st.resize(nonNullItemCount);
9259 m_1stNullItemsBeginCount = 0;
9260 m_1stNullItemsMiddleCount = 0;
9264 if(suballocations2nd.empty())
9266 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left: clear it and, in ring-buffer mode,
// swap roles so the 2nd vector becomes the new 1st.
9270 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9272 suballocations1st.clear();
9273 m_1stNullItemsBeginCount = 0;
9275 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9278 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9279 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9280 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9281 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9283 ++m_1stNullItemsBeginCount;
9284 --m_1stNullItemsMiddleCount;
9286 m_2ndNullItemsCount = 0;
9287 m_1stVectorIndex ^= 1;
9292 VMA_HEAVY_ASSERT(Validate());
// Constructor of the buddy-allocator metadata. Starts with zero allocations
// and an all-null free list; the node tree is built later in Init().
9299 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9300 VmaBlockMetadata(hAllocator),
9302 m_AllocationCount(0),
// Zero out the per-level free-list heads/tails (front/back pointers).
9306 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor. NOTE(review): body not fully visible in this view — presumably
// deletes the node tree rooted at m_Root; confirm against the full source.
9309 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
// Initializes the buddy metadata for a block of `size` bytes. Only the
// largest power-of-2 portion of the block is usable; the remainder is
// reported as "unusable" space. Builds the level count and a single free
// root node covering the whole usable range.
9314 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9316 VmaBlockMetadata::Init(size);
// Usable size is the previous power of 2 <= size.
9318 m_UsableSize = VmaPrevPow2(size);
9319 m_SumFreeSize = m_UsableSize;
// Grow the number of levels while nodes stay >= MIN_NODE_SIZE.
9323 while(m_LevelCount < MAX_LEVELS &&
9324 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root node: free, parentless, buddyless, at offset 0.
9329 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9330 rootNode->offset = 0;
9331 rootNode->type = Node::TYPE_FREE;
9332 rootNode->parent = VMA_NULL;
9333 rootNode->buddy = VMA_NULL;
9336 AddToFreeListFront(0, rootNode);
// Debug validation of the whole buddy structure: recursively validates the
// node tree, cross-checks the counters gathered during traversal against the
// cached members, then checks every per-level free list for consistent
// prev/next/back linkage. Unused levels must have empty free lists.
9339 bool VmaBlockMetadata_Buddy::Validate()
const 9342 ValidationContext ctx;
9343 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9345 VMA_VALIDATE(
false &&
"ValidateNode failed.");
9347 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9348 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate each level's doubly-linked free list.
9351 for(uint32_t level = 0; level < m_LevelCount; ++level)
9353 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9354 m_FreeList[level].front->free.prev == VMA_NULL);
9356 for(Node* node = m_FreeList[level].front;
9358 node = node->free.next)
9360 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9362 if(node->free.next == VMA_NULL)
9364 VMA_VALIDATE(m_FreeList[level].back == node);
9368 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must never hold free nodes.
9374 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9376 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node. Levels are scanned from the
// largest node size downward, so the first non-empty free list wins.
9382 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9384 for(uint32_t level = 0; level < m_LevelCount; ++level)
9386 if(m_FreeList[level].front != VMA_NULL)
9388 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by walking the node tree;
// the unusable tail of the block (size minus usable power-of-2 size) is
// accounted for separately when present.
9394 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9396 const VkDeviceSize unusableSize = GetUnusableSize();
9407 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9409 if(unusableSize > 0)
// Accumulates this block's totals into inoutStats. The unusable tail is
// counted as unused space (it can never be allocated from).
9418 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9420 const VkDeviceSize unusableSize = GetUnusableSize();
9422 inoutStats.
size += GetSize();
9423 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9428 if(unusableSize > 0)
// Writes a detailed JSON map of this block: summary statistics first, then a
// recursive dump of the node tree, then the unusable tail (if any) reported
// as one unused range at the end of the block.
9435 #if VMA_STATS_STRING_ENABLED 9437 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9441 CalcAllocationStatInfo(stat);
9443 PrintDetailedMap_Begin(
9449 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9451 const VkDeviceSize unusableSize = GetUnusableSize();
9452 if(unusableSize > 0)
9454 PrintDetailedMap_UnusedRange(json,
9459 PrintDetailedMap_End(json);
// Tries to find a free node suitable for an allocation of allocSize bytes
// with allocAlignment. The buddy algorithm does not support upper-address
// allocations or making other allocations lost. On success, stores the found
// level in pAllocationRequest->customData for later use in Alloc().
9462 #endif // #if VMA_STATS_STRING_ENABLED 9464 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9465 uint32_t currentFrameIndex,
9466 uint32_t frameInUseCount,
9467 VkDeviceSize bufferImageGranularity,
9468 VkDeviceSize allocSize,
9469 VkDeviceSize allocAlignment,
9471 VmaSuballocationType allocType,
9472 bool canMakeOtherLost,
9474 VmaAllocationRequest* pAllocationRequest)
9476 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively pad size/alignment up to bufferImageGranularity when the
// allocation type might conflict with neighbors (unknown or image types),
// since the buddy layout cannot track per-neighbor granularity conflicts.
9480 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9481 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9482 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9484 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9485 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9488 if(allocSize > m_UsableSize)
// Scan from the best-fitting level upward (toward larger nodes) until a
// free node with a suitably aligned offset is found.
9493 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9494 for(uint32_t level = targetLevel + 1; level--; )
9496 for(Node* freeNode = m_FreeList[level].front;
9497 freeNode != VMA_NULL;
9498 freeNode = freeNode->free.next)
9500 if(freeNode->offset % allocAlignment == 0)
9502 pAllocationRequest->offset = freeNode->offset;
9503 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9504 pAllocationRequest->sumItemSize = 0;
9505 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found at; Alloc() reads this back.
9506 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never produces requests that require making other
// allocations lost, so this only succeeds when nothing was requested lost.
9515 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9516 uint32_t currentFrameIndex,
9517 uint32_t frameInUseCount,
9518 VmaAllocationRequest* pAllocationRequest)
9524 return pAllocationRequest->itemsToMakeLostCount == 0;
// NOTE(review): body not visible in this view — the buddy algorithm does not
// support lost allocations, so this presumably returns 0; confirm upstream.
9527 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a previously created allocation request: locates the free node at
// the level recorded in request.customData, splits it repeatedly (creating
// left/right buddy children) until reaching the target level, then marks the
// final node as allocated and updates the counters.
9536 void VmaBlockMetadata_Buddy::Alloc(
9537 const VmaAllocationRequest& request,
9538 VmaSuballocationType type,
9539 VkDeviceSize allocSize,
9543 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Level at which CreateAllocationRequest() found the free node.
9544 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the exact node with the requested offset in that level's free list.
9546 Node* currNode = m_FreeList[currLevel].front;
9547 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9548 while(currNode->offset != request.offset)
9550 currNode = currNode->free.next;
9551 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node down to the target level.
9555 while(currLevel < targetLevel)
// The node being split leaves the free list; its two children enter the
// free list one level below.
9559 RemoveFromFreeList(currLevel, currNode);
9561 const uint32_t childrenLevel = currLevel + 1;
9564 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9565 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9567 leftChild->offset = currNode->offset;
9568 leftChild->type = Node::TYPE_FREE;
9569 leftChild->parent = currNode;
9570 leftChild->buddy = rightChild;
9572 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9573 rightChild->type = Node::TYPE_FREE;
9574 rightChild->parent = currNode;
9575 rightChild->buddy = leftChild;
9578 currNode->type = Node::TYPE_SPLIT;
9579 currNode->split.leftChild = leftChild;
// Push right then left so the left child ends up at the list front.
9582 AddToFreeListFront(childrenLevel, rightChild);
9583 AddToFreeListFront(childrenLevel, leftChild);
9588 currNode = m_FreeList[currLevel].front;
9597 VMA_ASSERT(currLevel == targetLevel &&
9598 currNode != VMA_NULL &&
9599 currNode->type == Node::TYPE_FREE);
9600 RemoveFromFreeList(currLevel, currNode);
// Convert the node into an allocation and update global accounting.
9603 currNode->type = Node::TYPE_ALLOCATION;
9604 currNode->allocation.alloc = hAllocation;
9606 ++m_AllocationCount;
9608 m_SumFreeSize -= allocSize;
// Recursively deletes a node and, if it is split, both of its children
// (right child reached via leftChild->buddy).
9611 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9613 if(node->type == Node::TYPE_SPLIT)
9615 DeleteNode(node->split.leftChild->buddy);
9616 DeleteNode(node->split.leftChild);
9619 vma_delete(GetAllocationCallbacks(), node);
// Recursive per-node validation: checks parent/buddy linkage, then per type
// accumulates counters into ctx (free size, free/allocation counts) and, for
// split nodes, validates children offsets and recurses into both halves.
9622 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9624 VMA_VALIDATE(level < m_LevelCount);
9625 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy, and buddies must point at each other.
9626 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9627 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9630 case Node::TYPE_FREE:
9632 ctx.calculatedSumFreeSize += levelNodeSize;
9633 ++ctx.calculatedFreeCount;
9635 case Node::TYPE_ALLOCATION:
9636 ++ctx.calculatedAllocationCount;
// Internal fragmentation (node size minus allocation size) counts as free.
9637 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9638 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9640 case Node::TYPE_SPLIT:
9642 const uint32_t childrenLevel = level + 1;
9643 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9644 const Node*
const leftChild = curr->split.leftChild;
9645 VMA_VALIDATE(leftChild != VMA_NULL);
9646 VMA_VALIDATE(leftChild->offset == curr->offset);
9647 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9649 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9651 const Node*
const rightChild = leftChild->buddy;
9652 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9653 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9655 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest level whose node size still fits it:
// descend while the next (half-sized) level can still hold allocSize.
9666 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9670 VkDeviceSize currLevelNodeSize = m_UsableSize;
9671 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9672 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9675 currLevelNodeSize = nextLevelNodeSize;
9676 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the tree from the root, descending
// into left/right children by comparing offsets, then marks the leaf free and
// merges it with its buddy upward while both halves are free.
9681 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9684 Node* node = m_Root;
9685 VkDeviceSize nodeOffset = 0;
9687 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
9688 while(node->type == Node::TYPE_SPLIT)
9690 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9691 if(offset < nodeOffset + nextLevelSize)
9693 node = node->split.leftChild;
9697 node = node->split.leftChild->buddy;
9698 nodeOffset += nextLevelSize;
9701 levelNodeSize = nextLevelSize;
9704 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9705 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9708 --m_AllocationCount;
// NOTE(review): the assert above tolerates alloc == VK_NULL_HANDLE, yet the
// next line dereferences alloc — verify callers never pass a null handle.
9709 m_SumFreeSize += alloc->GetSize();
9711 node->type = Node::TYPE_FREE;
// Merge with buddy while both are free, climbing toward the root.
9714 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9716 RemoveFromFreeList(level, node->buddy);
9717 Node*
const parent = node->parent;
9719 vma_delete(GetAllocationCallbacks(), node->buddy);
9720 vma_delete(GetAllocationCallbacks(), node);
9721 parent->type = Node::TYPE_FREE;
9729 AddToFreeListFront(level, node);
// Recursive stat-gathering helper: free nodes add an unused range, allocated
// nodes add an allocation (plus any internal-fragmentation remainder as an
// unused range), split nodes recurse into both children.
9732 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9736 case Node::TYPE_FREE:
9742 case Node::TYPE_ALLOCATION:
9744 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
9750 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9751 if(unusedRangeSize > 0)
9760 case Node::TYPE_SPLIT:
9762 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9763 const Node*
const leftChild = node->split.leftChild;
9764 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9765 const Node*
const rightChild = leftChild->buddy;
9766 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a free node at the front of the given level's doubly-linked free
// list, handling the empty-list case (front == back == node).
9774 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9776 VMA_ASSERT(node->type == Node::TYPE_FREE);
9779 Node*
const frontNode = m_FreeList[level].front;
9780 if(frontNode == VMA_NULL)
// List was empty: node becomes both front and back.
9782 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9783 node->free.prev = node->free.next = VMA_NULL;
9784 m_FreeList[level].front = m_FreeList[level].back = node;
9788 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9789 node->free.prev = VMA_NULL;
9790 node->free.next = frontNode;
9791 frontNode->free.prev = node;
9792 m_FreeList[level].front = node;
// Unlinks a node from the given level's doubly-linked free list, updating
// front/back pointers when the node was at either end.
9796 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9798 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the predecessor link (or the list front).
9801 if(node->free.prev == VMA_NULL)
9803 VMA_ASSERT(m_FreeList[level].front == node);
9804 m_FreeList[level].front = node->free.next;
9808 Node*
const prevFreeNode = node->free.prev;
9809 VMA_ASSERT(prevFreeNode->free.next == node);
9810 prevFreeNode->free.next = node->free.next;
// Fix the successor link (or the list back).
9814 if(node->free.next == VMA_NULL)
9816 VMA_ASSERT(m_FreeList[level].back == node);
9817 m_FreeList[level].back = node->free.prev;
9821 Node*
const nextFreeNode = node->free.next;
9822 VMA_ASSERT(nextFreeNode->free.prev == node);
9823 nextFreeNode->free.prev = node->free.prev;
// Recursive JSON-dump helper: free nodes print as unused ranges, allocated
// nodes print the allocation plus any internal-fragmentation tail, and split
// nodes recurse into both children.
9827 #if VMA_STATS_STRING_ENABLED 9828 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9832 case Node::TYPE_FREE:
9833 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9835 case Node::TYPE_ALLOCATION:
9837 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9838 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// The padding between the allocation end and the node end is reported as
// an unused range.
9839 if(allocSize < levelNodeSize)
9841 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
9845 case Node::TYPE_SPLIT:
9847 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9848 const Node*
const leftChild = node->split.leftChild;
9849 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9850 const Node*
const rightChild = leftChild->buddy;
9851 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: the block starts without metadata, device memory, or mapping;
// real initialization happens in Init().
9858 #endif // #if VMA_STATS_STRING_ENABLED 9864 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9865 m_pMetadata(VMA_NULL),
9866 m_MemoryTypeIndex(UINT32_MAX),
9868 m_hMemory(VK_NULL_HANDLE),
9870 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates the
// metadata object matching the requested algorithm (linear, buddy, or the
// generic default).
9874 void VmaDeviceMemoryBlock::Init(
9876 uint32_t newMemoryTypeIndex,
9877 VkDeviceMemory newMemory,
9878 VkDeviceSize newSize,
// Must not be initialized twice.
9882 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9884 m_MemoryTypeIndex = newMemoryTypeIndex;
9886 m_hMemory = newMemory;
9891 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9894 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
9900 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9902 m_pMetadata->Init(newSize);
// Releases the block's Vulkan memory and metadata. The block must be empty —
// all suballocations freed — before destruction.
9905 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9909 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9911 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9912 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9913 m_hMemory = VK_NULL_HANDLE;
9915 vma_delete(allocator, m_pMetadata);
9916 m_pMetadata = VMA_NULL;
// Debug check: the block must own non-null memory of non-zero size, and its
// metadata must itself validate.
9919 bool VmaDeviceMemoryBlock::Validate()
const 9921 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
9922 (m_pMetadata->GetSize() != 0));
9924 return m_pMetadata->Validate();
// Maps the block, delegates corruption detection (magic-value scanning) to
// the metadata, then unmaps. Returns the mapping error if Map() fails.
9927 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9929 void* pData =
nullptr;
9930 VkResult res = Map(hAllocator, 1, &pData);
9931 if(res != VK_SUCCESS)
9936 res = m_pMetadata->CheckCorruption(pData);
9938 Unmap(hAllocator, 1);
// Reference-counted mapping: if the block is already mapped, only bumps
// m_MapCount and returns the cached pointer; otherwise performs the actual
// vkMapMemory. Thread safety is provided by the per-block mutex.
9943 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9950 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse existing mapping.
9953 m_MapCount += count;
9954 VMA_ASSERT(m_pMappedData != VMA_NULL);
9955 if(ppData != VMA_NULL)
9957 *ppData = m_pMappedData;
// First mapping: call into Vulkan.
9963 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9964 hAllocator->m_hDevice,
9970 if(result == VK_SUCCESS)
9972 if(ppData != VMA_NULL)
9974 *ppData = m_pMappedData;
// Reference-counted unmapping: decrements m_MapCount and only calls
// vkUnmapMemory when the count reaches zero. Asserts on unbalanced unmaps.
9982 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9989 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9990 if(m_MapCount >= count)
9992 m_MapCount -= count;
9995 m_pMappedData = VMA_NULL;
9996 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10001 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection support: maps the block and writes the magic value
// into the debug margins immediately before and after the allocation.
// Requires VMA_DEBUG_MARGIN > 0 (multiple of 4) and detection enabled.
10005 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10007 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10008 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10011 VkResult res = Map(hAllocator, 1, &pData);
10012 if(res != VK_SUCCESS)
10017 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10018 VmaWriteMagicValue(pData, allocOffset + allocSize);
10020 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: on free, re-checks the
// magic values in the margins around the allocation and asserts loudly if
// either was overwritten (heap-corruption detector).
10025 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10027 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10028 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10031 VkResult res = Map(hAllocator, 1, &pData);
10032 if(res != VK_SUCCESS)
10037 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10039 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10041 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10043 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10046 Unmap(hAllocator, 1);
// Binds a buffer to this block's memory at the allocation's offset. Holds
// the block mutex because vkBindBufferMemory must not race with vkMapMemory
// on the same VkDeviceMemory from multiple threads.
10051 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10056 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10057 hAllocation->GetBlock() ==
this);
10059 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10060 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10061 hAllocator->m_hDevice,
10064 hAllocation->GetOffset());
// Binds an image to this block's memory at the allocation's offset.
// Mirrors BindBufferMemory, including the mutex protection.
10067 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10072 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10073 hAllocation->GetBlock() ==
this);
10075 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10076 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10077 hAllocator->m_hDevice,
10080 hAllocation->GetOffset());
10085 memset(&outInfo, 0,
sizeof(outInfo));
// NOTE(review): body not visible in this view — presumably derives averages
// (e.g. allocationSizeAvg / unusedRangeSizeAvg) from accumulated totals;
// confirm against the full source.
10104 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom-pool constructor: forwards the VmaPoolCreateInfo fields into the
// pool's internal VmaBlockVector. When createInfo.blockSize is 0 the
// allocator-provided preferredBlockSize is used instead, and the block size
// is then treated as non-explicit (may be adjusted downward on OOM).
10112 VmaPool_T::VmaPool_T(
10115 VkDeviceSize preferredBlockSize) :
10118 createInfo.memoryTypeIndex,
10119 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10120 createInfo.minBlockCount,
10121 createInfo.maxBlockCount,
10123 createInfo.frameInUseCount,
10125 createInfo.blockSize != 0,
// Destructor. NOTE(review): body not visible in this view; members are
// expected to clean themselves up — confirm against the full source.
10131 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores all configuration (memory type,
// preferred/min/max block counts, granularity, algorithm, etc.) and starts
// with an empty block list and no defragmentator.
10135 #if VMA_STATS_STRING_ENABLED 10137 #endif // #if VMA_STATS_STRING_ENABLED 10139 VmaBlockVector::VmaBlockVector(
10141 uint32_t memoryTypeIndex,
10142 VkDeviceSize preferredBlockSize,
10143 size_t minBlockCount,
10144 size_t maxBlockCount,
10145 VkDeviceSize bufferImageGranularity,
10146 uint32_t frameInUseCount,
10148 bool explicitBlockSize,
10149 uint32_t algorithm) :
10150 m_hAllocator(hAllocator),
10151 m_MemoryTypeIndex(memoryTypeIndex),
10152 m_PreferredBlockSize(preferredBlockSize),
10153 m_MinBlockCount(minBlockCount),
10154 m_MaxBlockCount(maxBlockCount),
10155 m_BufferImageGranularity(bufferImageGranularity),
10156 m_FrameInUseCount(frameInUseCount),
10157 m_IsCustomPool(isCustomPool),
10158 m_ExplicitBlockSize(explicitBlockSize),
10159 m_Algorithm(algorithm),
10160 m_HasEmptyBlock(false),
10161 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10162 m_pDefragmentator(VMA_NULL),
// Destructor: destroys all owned memory blocks in reverse order. The
// defragmentator must already have been destroyed by this point.
10167 VmaBlockVector::~VmaBlockVector()
10169 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10171 for(
size_t i = m_Blocks.size(); i--; )
10173 m_Blocks[i]->Destroy(m_hAllocator);
10174 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure.
10178 VkResult VmaBlockVector::CreateMinBlocks()
10180 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10182 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10183 if(res != VK_SUCCESS)
// Accumulates statistics from every block into *pStats, under the vector
// mutex.
10191 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10193 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10195 const size_t blockCount = m_Blocks.size();
// NOTE(review): loop index is uint32_t while blockCount is size_t — a
// mixed-width comparison; harmless unless block count exceeds UINT32_MAX.
10204 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10206 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10207 VMA_ASSERT(pBlock);
10208 VMA_HEAVY_ASSERT(pBlock->Validate());
10209 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection (magic margins) is active only when compiled in
// (VMA_DEBUG_DETECT_CORRUPTION + non-zero margin) AND this memory type is
// both HOST_VISIBLE and HOST_COHERENT, since the margins must be written
// and read through a CPU mapping.
10213 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10215 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10216 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10217 (VMA_DEBUG_MARGIN > 0) &&
10218 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries in the make-allocations-lost loop of
// VmaBlockVector::Allocate, to guarantee termination under contention.
10221 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Core suballocation routine. Strategy, under the vector mutex:
//   1. Try existing blocks (last block first, then forward or backward scan
//      depending on allocation strategy).
//   2. If allowed, create a new block — shrinking the preferred size up to
//      NEW_BLOCK_SIZE_SHIFT_MAX times when not an explicit-size pool and
//      allocation of the full size fails.
//   3. If canMakeOtherLost, repeatedly (up to VMA_ALLOCATION_TRY_COUNT)
//      pick the cheapest "make other allocations lost" request and retry.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits, or
// VK_ERROR_FEATURE_NOT_PRESENT for flag combinations the chosen algorithm
// does not support.
10223 VkResult VmaBlockVector::Allocate(
10225 uint32_t currentFrameIndex,
10227 VkDeviceSize alignment,
10229 VmaSuballocationType suballocType,
10236 const bool canCreateNewBlock =
10238 (m_Blocks.size() < m_MaxBlockCount);
10245 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for the linear algorithm.
10249 if(isUpperAddress &&
10252 return VK_ERROR_FEATURE_NOT_PRESENT;
10266 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: request (plus both debug margins) can never fit in a block.
10270 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10272 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10275 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10282 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: try the most recently used (last) block first.
10291 if(!m_Blocks.empty())
10293 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10294 VMA_ASSERT(pCurrBlock);
10295 VkResult res = AllocateFromBlock(
10306 if(res == VK_SUCCESS)
10308 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over all existing blocks.
10318 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10320 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10321 VMA_ASSERT(pCurrBlock);
10322 VkResult res = AllocateFromBlock(
10333 if(res == VK_SUCCESS)
10335 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (alternate strategy ordering).
10343 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10345 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10346 VMA_ASSERT(pCurrBlock);
10347 VkResult res = AllocateFromBlock(
10358 if(res == VK_SUCCESS)
10360 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. No existing block could serve the request — create a new block.
10368 if(canCreateNewBlock)
10371 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10372 uint32_t newBlockSizeShift = 0;
10373 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10375 if(!m_ExplicitBlockSize)
// Heuristic: start with a smaller block than preferred when all
// existing blocks are smaller and the request would still fit twice.
10378 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10379 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10381 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10382 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10384 newBlockSize = smallerNewBlockSize;
10385 ++newBlockSizeShift;
10394 size_t newBlockIndex = 0;
10395 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, retry with progressively halved block sizes (non-explicit only).
10397 if(!m_ExplicitBlockSize)
10399 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10401 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10402 if(smallerNewBlockSize >= size)
10404 newBlockSize = smallerNewBlockSize;
10405 ++newBlockSizeShift;
10406 res = CreateBlock(newBlockSize, &newBlockIndex);
10415 if(res == VK_SUCCESS)
10417 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10418 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10420 res = AllocateFromBlock(
10431 if(res == VK_SUCCESS)
10433 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation from a brand-new block must succeed; treat failure as OOM.
10439 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Last resort: evict ("make lost") other allocations to make room.
10446 if(canMakeOtherLost)
10448 uint32_t tryIndex = 0;
10449 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10451 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10452 VmaAllocationRequest bestRequest = {};
10453 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: find the cheapest candidate request across all blocks.
10459 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10461 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10462 VMA_ASSERT(pCurrBlock);
10463 VmaAllocationRequest currRequest = {};
10464 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10467 m_BufferImageGranularity,
10476 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10477 if(pBestRequestBlock == VMA_NULL ||
10478 currRequestCost < bestRequestCost)
10480 pBestRequestBlock = pCurrBlock;
10481 bestRequest = currRequest;
10482 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be made lost — cannot do better.
10484 if(bestRequestCost == 0)
// Backward scan variant (alternate strategy ordering).
10495 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10497 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10498 VMA_ASSERT(pCurrBlock);
10499 VmaAllocationRequest currRequest = {};
10500 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10503 m_BufferImageGranularity,
10512 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10513 if(pBestRequestBlock == VMA_NULL ||
10514 currRequestCost < bestRequestCost ||
10517 pBestRequestBlock = pCurrBlock;
10518 bestRequest = currRequest;
10519 bestRequestCost = currRequestCost;
10521 if(bestRequestCost == 0 ||
// Commit the best request: map if persistently mapped, make the
// required allocations lost, then allocate in their place.
10531 if(pBestRequestBlock != VMA_NULL)
10535 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10536 if(res != VK_SUCCESS)
10542 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10548 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10550 m_HasEmptyBlock =
false;
10553 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10554 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10555 (*pAllocation)->InitBlockAllocation(
10558 bestRequest.offset,
10564 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10565 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10566 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10567 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10569 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10571 if(IsCorruptionDetectionEnabled())
10573 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10574 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted retries: another thread kept winning the race for space.
10589 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10591 return VK_ERROR_TOO_MANY_OBJECTS;
10595 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block-based allocation. Under the mutex: validates corruption
// margins, drops a persistent mapping, frees the suballocation, and manages
// the "keep at most one empty block" policy. The actual Vulkan memory
// destruction happens outside the lock.
10598 void VmaBlockVector::Free(
10601 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope for the lock; the block deletion below runs unlocked.
10605 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10607 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10609 if(IsCorruptionDetectionEnabled())
10611 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10612 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
10615 if(hAllocation->IsPersistentMap())
10617 pBlock->Unmap(m_hAllocator, 1);
10620 pBlock->m_pMetadata->Free(hAllocation);
10621 VMA_HEAVY_ASSERT(pBlock->Validate());
10623 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Keep at most one empty block alive (above m_MinBlockCount): if this block
// became empty and another empty block already exists, schedule this one
// for deletion; otherwise just remember that an empty block exists.
10626 if(pBlock->m_pMetadata->IsEmpty())
10629 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10631 pBlockToDelete = pBlock;
10637 m_HasEmptyBlock =
true;
// Block did not become empty, but an old empty block may now be deletable.
10642 else if(m_HasEmptyBlock)
10644 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10645 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10647 pBlockToDelete = pLastBlock;
10648 m_Blocks.pop_back();
10649 m_HasEmptyBlock =
false;
10653 IncrementallySortBlocks();
// Destroy the scheduled block outside the lock (vkFreeMemory can be slow).
10658 if(pBlockToDelete != VMA_NULL)
10660 VMA_DEBUG_LOG(
" Deleted empty allocation");
10661 pBlockToDelete->Destroy(m_hAllocator);
10662 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, short-circuiting as soon
// as a block of at least the preferred size is seen.
10666 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10668 VkDeviceSize result = 0;
10669 for(
size_t i = m_Blocks.size(); i--; )
10671 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10672 if(result >= m_PreferredBlockSize)
// Removes (but does not destroy) the given block from the vector by linear
// search.
10680 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10682 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10684 if(m_Blocks[blockIndex] == pBlock)
10686 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free size. Called after
// each free so blocks with less free space drift to the front over time,
// improving the allocate fast path.
10693 void VmaBlockVector::IncrementallySortBlocks()
10698 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10700 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10702 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts an allocation inside one specific block: builds an allocation
// request (no lost allocations allowed here), maps if persistently mapped,
// creates the VmaAllocation_T, and applies debug fill / corruption margins.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY if the block has no suitable space.
10709 VkResult VmaBlockVector::AllocateFromBlock(
10710 VmaDeviceMemoryBlock* pBlock,
10712 uint32_t currentFrameIndex,
10714 VkDeviceSize alignment,
10717 VmaSuballocationType suballocType,
10726 VmaAllocationRequest currRequest = {};
10727 if(pBlock->m_pMetadata->CreateAllocationRequest(
10730 m_BufferImageGranularity,
// This path never evicts other allocations.
10740 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
10744 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10745 if(res != VK_SUCCESS)
// The block is about to gain an allocation, so it can no longer be the
// tracked empty block.
10752 if(pBlock->m_pMetadata->IsEmpty())
10754 m_HasEmptyBlock =
false;
10757 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10758 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10759 (*pAllocation)->InitBlockAllocation(
10762 currRequest.offset,
10768 VMA_HEAVY_ASSERT(pBlock->Validate());
10769 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10770 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10772 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10774 if(IsCorruptionDetectionEnabled())
10776 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10777 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10781 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally reports the
// new block's index via pNewBlockIndex.
10784 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10786 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10787 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10788 allocInfo.allocationSize = blockSize;
10789 VkDeviceMemory mem = VK_NULL_HANDLE;
10790 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
10799 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10804 allocInfo.allocationSize,
10808 m_Blocks.push_back(pBlock);
10809 if(pNewBlockIndex != VMA_NULL)
10811 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON: pool configuration (custom pools get
// MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount/Algorithm, default
// pools just PreferredBlockSize) followed by a per-block detailed map keyed
// by block id. Runs under the vector mutex.
10817 #if VMA_STATS_STRING_ENABLED 10819 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
10821 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10823 json.BeginObject();
// Custom-pool branch: full configuration.
10827 json.WriteString(
"MemoryTypeIndex");
10828 json.WriteNumber(m_MemoryTypeIndex);
10830 json.WriteString(
"BlockSize");
10831 json.WriteNumber(m_PreferredBlockSize);
10833 json.WriteString(
"BlockCount");
10834 json.BeginObject(
true);
10835 if(m_MinBlockCount > 0)
10837 json.WriteString(
"Min");
10838 json.WriteNumber((uint64_t)m_MinBlockCount);
10840 if(m_MaxBlockCount < SIZE_MAX)
10842 json.WriteString(
"Max");
10843 json.WriteNumber((uint64_t)m_MaxBlockCount);
10845 json.WriteString(
"Cur");
10846 json.WriteNumber((uint64_t)m_Blocks.size());
10849 if(m_FrameInUseCount > 0)
10851 json.WriteString(
"FrameInUseCount");
10852 json.WriteNumber(m_FrameInUseCount);
10855 if(m_Algorithm != 0)
10857 json.WriteString(
"Algorithm");
10858 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch: only the preferred block size.
10863 json.WriteString(
"PreferredBlockSize");
10864 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by block id.
10867 json.WriteString(
"Blocks");
10868 json.BeginObject();
10869 for(
size_t i = 0; i < m_Blocks.size(); ++i)
10871 json.BeginString();
10872 json.ContinueString(m_Blocks[i]->GetId());
10875 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily creates (on first call) and returns the defragmentator bound to this
// block vector. Subsequent calls reuse the same instance.
10882 #endif // #if VMA_STATS_STRING_ENABLED 10884 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
10886 uint32_t currentFrameIndex)
10888 if(m_pDefragmentator == VMA_NULL)
10890 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
10893 currentFrameIndex)
10896 return m_pDefragmentator;
// Runs defragmentation on this block vector under its mutex, updates the
// caller's remaining move budgets (maxBytesToMove/maxAllocationsToMove are
// in-out), accumulates optional statistics, and frees surplus empty blocks.
// Returns early (not visible here) if no defragmentator exists.
10899 VkResult VmaBlockVector::Defragment(
10901 VkDeviceSize& maxBytesToMove,
10902 uint32_t& maxAllocationsToMove)
10904 if(m_pDefragmentator == VMA_NULL)
10909 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10912 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report what was moved and subtract it from the caller's budgets.
10915 if(pDefragmentationStats != VMA_NULL)
10917 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
10918 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
10919 pDefragmentationStats->
bytesMoved += bytesMoved;
10921 VMA_ASSERT(bytesMoved <= maxBytesToMove);
10922 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
10923 maxBytesToMove -= bytesMoved;
10924 maxAllocationsToMove -= allocationsMoved;
// Re-derive m_HasEmptyBlock: destroy empty blocks beyond m_MinBlockCount
// (iterating backwards so removal by index stays valid), keep at most the
// required minimum and remember that an empty one remains.
10928 m_HasEmptyBlock =
false;
10929 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10931 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
10932 if(pBlock->m_pMetadata->IsEmpty())
10934 if(m_Blocks.size() > m_MinBlockCount)
10936 if(pDefragmentationStats != VMA_NULL)
10939 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
10942 VmaVectorRemove(m_Blocks, blockIndex);
10943 pBlock->Destroy(m_hAllocator);
10944 vma_delete(m_hAllocator, pBlock);
10948 m_HasEmptyBlock =
true;
// Destroys the lazily-created defragmentator, if any, and resets the pointer
// so EnsureDefragmentator can create a fresh one later.
10956 void VmaBlockVector::DestroyDefragmentator()
10958 if(m_pDefragmentator != VMA_NULL)
10960 vma_delete(m_hAllocator, m_pDefragmentator);
10961 m_pDefragmentator = VMA_NULL;
// Marks as "lost" every allocation in this vector that is eligible given the
// current frame index and m_FrameInUseCount. Runs under the vector's mutex.
// Optionally reports the total number of allocations made lost.
10965 void VmaBlockVector::MakePoolAllocationsLost(
10966 uint32_t currentFrameIndex,
10967 size_t* pLostAllocationCount)
10969 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10970 size_t lostAllocationCount = 0;
10971 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10973 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10974 VMA_ASSERT(pBlock);
10975 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
10977 if(pLostAllocationCount != VMA_NULL)
10979 *pLostAllocationCount = lostAllocationCount;
// Validates the debug magic values around every allocation in every block.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not
// enabled for this vector; otherwise propagates the first per-block failure.
// The VK_SUCCESS return on full success is outside this fragment.
10983 VkResult VmaBlockVector::CheckCorruption()
10985 if(!IsCorruptionDetectionEnabled())
10987 return VK_ERROR_FEATURE_NOT_PRESENT;
10990 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10991 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10993 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10994 VMA_ASSERT(pBlock);
10995 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10996 if(res != VK_SUCCESS)
// Accumulates this vector's per-block allocation statistics into pStats:
// the global total, the per-memory-type bucket, and the per-heap bucket.
// Runs under the vector's mutex.
11004 void VmaBlockVector::AddStats(
VmaStats* pStats)
11006 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11007 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11009 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11011 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11013 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11014 VMA_ASSERT(pBlock);
11015 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo is declared in an elided line; each block's stats are
// folded into all three aggregation buckets.
11017 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11018 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11019 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11020 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one allocator and one block
// vector, initializes counters, and sets up allocator-aware containers.
// Only supported for the default allocation algorithm (asserted).
11027 VmaDefragmentator::VmaDefragmentator(
11029 VmaBlockVector* pBlockVector,
11030 uint32_t currentFrameIndex) :
11031 m_hAllocator(hAllocator),
11032 m_pBlockVector(pBlockVector),
11033 m_CurrentFrameIndex(currentFrameIndex),
11035 m_AllocationsMoved(0),
11036 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11037 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11039 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: frees every per-block info object (reverse order).
11042 VmaDefragmentator::~VmaDefragmentator()
11044 for(
size_t i = m_Blocks.size(); i--; )
11046 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a defragmentation candidate. pChanged, if
// non-null, will be set to VK_TRUE later if the allocation is actually moved.
11050 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11052 AllocationInfo allocInfo;
11053 allocInfo.m_hAllocation = hAlloc;
11054 allocInfo.m_pChanged = pChanged;
11055 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, mapping it only if necessary.
// Priority: (1) reuse a mapping made earlier by this defragmentation,
// (2) reuse a mapping the block already has for other reasons,
// (3) map now and remember the pointer so Unmap() can undo it.
11058 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11061 if(m_pMappedDataForDefragmentation)
11063 *ppMappedData = m_pMappedDataForDefragmentation;
11068 if(m_pBlock->GetMappedData())
11070 *ppMappedData = m_pBlock->GetMappedData();
11075 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11076 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes a mapping created by EnsureMapping (case 3 only); mappings that
// pre-existed on the block are left untouched.
11080 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11082 if(m_pMappedDataForDefragmentation != VMA_NULL)
11084 m_pBlock->Unmap(hAllocator, 1);
// One pass of the compaction algorithm: walks source allocations from the
// last block backwards and tries to re-place each one earlier (into a block
// with a lower index, or a lower offset in the same block). Data is moved
// with a CPU memcpy between mapped pointers. Stops with VK_INCOMPLETE when
// the move budgets would be exceeded.
// NOTE(review): fragmentary listing — loop braces, the memcpy call line, and
// several cursor-advance branches are elided; comments follow visible code.
11088 VkResult VmaDefragmentator::DefragmentRound(
11089 VkDeviceSize maxBytesToMove,
11090 uint32_t maxAllocationsToMove)
11092 if(m_Blocks.empty())
// Cursor over (block, allocation): start past the end of the last block.
11097 size_t srcBlockIndex = m_Blocks.size() - 1;
11098 size_t srcAllocIndex = SIZE_MAX;
// Normalize the cursor: skip empty blocks, wrapping SIZE_MAX to "last
// allocation of the current block"; srcBlockIndex == 0 means we're done.
11104 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11106 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11109 if(srcBlockIndex == 0)
11116 srcAllocIndex = SIZE_MAX;
11121 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11125 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11126 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11128 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11129 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11130 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11131 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every candidate destination block up to and including the source block.
11134 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11136 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11137 VmaAllocationRequest dstAllocRequest;
11138 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11139 m_CurrentFrameIndex,
11140 m_pBlockVector->GetFrameInUseCount(),
11141 m_pBlockVector->GetBufferImageGranularity(),
11148 &dstAllocRequest) &&
11150 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11152 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Honor the caller's budgets before committing anything.
11155 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11156 (m_BytesMoved + size > maxBytesToMove))
11158 return VK_INCOMPLETE;
11161 void* pDstMappedData = VMA_NULL;
11162 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11163 if(res != VK_SUCCESS)
11168 void* pSrcMappedData = VMA_NULL;
11169 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11170 if(res != VK_SUCCESS)
// Copy the allocation's bytes to the destination (memcpy call elided).
11177 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11178 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11179 static_cast<size_t>(size));
// Re-establish the debug margin guards around the new location.
11181 if(VMA_DEBUG_MARGIN > 0)
11183 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11184 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate in destination metadata, free from source metadata,
// and repoint the allocation handle at its new block/offset.
11187 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11192 allocInfo.m_hAllocation);
11193 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11195 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11197 if(allocInfo.m_pChanged != VMA_NULL)
11199 *allocInfo.m_pChanged = VK_TRUE;
11202 ++m_AllocationsMoved;
11203 m_BytesMoved += size;
11205 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor to the previous allocation / previous block.
11213 if(srcAllocIndex > 0)
11219 if(srcBlockIndex > 0)
11222 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block bookkeeping, buckets
// registered (non-lost) allocations into their owning blocks, sorts blocks
// into move-destination order, then runs up to two DefragmentRound passes.
// Finally unmaps anything this defragmentation mapped.
11232 VkResult VmaDefragmentator::Defragment(
11233 VkDeviceSize maxBytesToMove,
11234 uint32_t maxAllocationsToMove)
11236 if(m_Allocations.empty())
// Create one BlockInfo per block of the owning vector.
11242 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11243 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11245 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11246 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11247 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
11251 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
11254 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11256 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped — they no longer occupy space.
11258 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11260 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11261 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11262 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11264 (*it)->m_Allocations.push_back(allocInfo);
11272 m_Allocations.clear();
11274 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11276 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11277 pBlockInfo->CalcHasNonMovableAllocations();
// NOTE(review): "Descecnding" is a pre-existing identifier typo kept as-is.
11278 pBlockInfo->SortAllocationsBySizeDescecnding();
// Order blocks so preferred move destinations come first.
11282 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds, stopping early on VK_INCOMPLETE or error.
11285 VkResult result = VK_SUCCESS;
11286 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11288 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
11292 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11294 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// A move is worthwhile only if it compacts: to an earlier block, or to a
// lower offset within the same block. (Returns, in elided lines: true for
// dstBlockIndex < srcBlockIndex, false for >, true for lower offset.)
11300 bool VmaDefragmentator::MoveMakesSense(
11301 size_t dstBlockIndex, VkDeviceSize dstOffset,
11302 size_t srcBlockIndex, VkDeviceSize srcOffset)
11304 if(dstBlockIndex < srcBlockIndex)
11308 if(dstBlockIndex > srcBlockIndex)
11312 if(dstOffset < srcOffset)
// VmaRecorder (Windows-only call recording): constructor plus the Init-style
// body that opens the CSV trace file and writes its two header lines.
// Uses QueryPerformanceCounter/-Frequency for timestamps relative to start.
// NOTE(review): the function boundary between the constructor and the
// initialization method is elided in this fragment.
11322 #if VMA_RECORDING_ENABLED 11324 VmaRecorder::VmaRecorder() :
11329 m_StartCounter(INT64_MAX)
11335 m_UseMutex = useMutex;
11336 m_Flags = settings.
flags;
11338 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11339 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// fopen_s failure aborts initialization.
11342 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11345 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file identification line and format version "1,3".
11349 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11350 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the trace file if one was opened (body elided).
11355 VmaRecorder::~VmaRecorder()
11357 if(m_File != VMA_NULL)
// Trace entries for allocator lifetime. Each Record* method follows the same
// pattern: capture thread id + timestamp, lock the file mutex, append one
// CSV line "threadId,time,frameIndex,functionName[,args...]".
11363 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11365 CallParams callParams;
11366 GetBasicParams(callParams);
11368 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11369 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
11373 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11375 CallParams callParams;
11376 GetBasicParams(callParams);
11378 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11379 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Trace entries for pool lifetime. The RecordCreatePool signature line is
// missing from this listing; its format string logs the pool create-info
// fields followed by the resulting pool handle.
11385 CallParams callParams;
11386 GetBasicParams(callParams);
11388 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11389 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
11400 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11402 CallParams callParams;
11403 GetBasicParams(callParams);
11405 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11406 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Trace entries for the three allocation entry points. Each logs the Vulkan
// memory requirements, (for buffer/image) the dedicated-allocation hints,
// the VmaAllocationCreateInfo fields, and the user-data string built by
// UserDataString (escaped string or pointer form).
11411 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11412 const VkMemoryRequirements& vkMemReq,
11416 CallParams callParams;
11417 GetBasicParams(callParams);
11419 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11420 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11421 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11423 vkMemReq.alignment,
11424 vkMemReq.memoryTypeBits,
11432 userDataStr.GetString());
11436 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11437 const VkMemoryRequirements& vkMemReq,
11438 bool requiresDedicatedAllocation,
11439 bool prefersDedicatedAllocation,
11443 CallParams callParams;
11444 GetBasicParams(callParams);
11446 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11447 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11448 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11450 vkMemReq.alignment,
11451 vkMemReq.memoryTypeBits,
11452 requiresDedicatedAllocation ? 1 : 0,
11453 prefersDedicatedAllocation ? 1 : 0,
11461 userDataStr.GetString());
11465 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11466 const VkMemoryRequirements& vkMemReq,
11467 bool requiresDedicatedAllocation,
11468 bool prefersDedicatedAllocation,
11472 CallParams callParams;
11473 GetBasicParams(callParams);
11475 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11476 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11477 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11479 vkMemReq.alignment,
11480 vkMemReq.memoryTypeBits,
11481 requiresDedicatedAllocation ? 1 : 0,
11482 prefersDedicatedAllocation ? 1 : 0,
11490 userDataStr.GetString());
// Trace entries for allocation lifetime and mapping operations. All follow
// the standard pattern (capture params, lock file mutex, append CSV line);
// the trailing allocation-handle argument of each fprintf is elided.
11494 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11497 CallParams callParams;
11498 GetBasicParams(callParams);
11500 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11501 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11506 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11508 const void* pUserData)
11510 CallParams callParams;
11511 GetBasicParams(callParams);
11513 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11514 UserDataString userDataStr(
11517 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11519 userDataStr.GetString());
11523 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11526 CallParams callParams;
11527 GetBasicParams(callParams);
11529 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11530 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11535 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11538 CallParams callParams;
11539 GetBasicParams(callParams);
11541 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11542 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11547 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11550 CallParams callParams;
11551 GetBasicParams(callParams);
11553 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11554 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11559 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11560 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11562 CallParams callParams;
11563 GetBasicParams(callParams);
11565 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11566 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11573 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11574 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11576 CallParams callParams;
11577 GetBasicParams(callParams);
11579 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11580 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Trace entries for vmaCreateBuffer / vmaCreateImage: each logs the full
// Vk*CreateInfo fields, the VmaAllocationCreateInfo fields, and the
// user-data string, as one CSV line.
11587 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11588 const VkBufferCreateInfo& bufCreateInfo,
11592 CallParams callParams;
11593 GetBasicParams(callParams);
11595 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11596 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11597 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11598 bufCreateInfo.flags,
11599 bufCreateInfo.size,
11600 bufCreateInfo.usage,
11601 bufCreateInfo.sharingMode,
11602 allocCreateInfo.
flags,
11603 allocCreateInfo.
usage,
11607 allocCreateInfo.
pool,
11609 userDataStr.GetString());
11613 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11614 const VkImageCreateInfo& imageCreateInfo,
11618 CallParams callParams;
11619 GetBasicParams(callParams);
11621 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11622 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11623 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11624 imageCreateInfo.flags,
11625 imageCreateInfo.imageType,
11626 imageCreateInfo.format,
11627 imageCreateInfo.extent.width,
11628 imageCreateInfo.extent.height,
11629 imageCreateInfo.extent.depth,
11630 imageCreateInfo.mipLevels,
11631 imageCreateInfo.arrayLayers,
11632 imageCreateInfo.samples,
11633 imageCreateInfo.tiling,
11634 imageCreateInfo.usage,
11635 imageCreateInfo.sharingMode,
11636 imageCreateInfo.initialLayout,
11637 allocCreateInfo.
flags,
11638 allocCreateInfo.
usage,
11642 allocCreateInfo.
pool,
11644 userDataStr.GetString());
// Trace entries for destroy/touch/query calls, plus the tail of the
// UserDataString constructor: when user data is a string (per flags checked
// on an elided line) it is kept as text, otherwise the pointer is formatted
// with sprintf_s into a small buffer.
11648 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11651 CallParams callParams;
11652 GetBasicParams(callParams);
11654 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11655 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
11660 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11663 CallParams callParams;
11664 GetBasicParams(callParams);
11666 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11667 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
11672 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11675 CallParams callParams;
11676 GetBasicParams(callParams);
11678 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11679 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11684 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11687 CallParams callParams;
11688 GetBasicParams(callParams);
11690 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11691 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
11696 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11699 CallParams callParams;
11700 GetBasicParams(callParams);
11702 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11703 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// UserDataString ctor fragment: string vs. pointer representation.
11710 if(pUserData != VMA_NULL)
11714 m_Str = (
const char*)pUserData;
11718 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" … "Config,End" section of the trace: physical
// device identity and limits, every memory heap and type, whether the
// dedicated-allocation extension is enabled, and the values of all VMA debug
// macros — enough for a replayer to reproduce the environment.
11728 void VmaRecorder::WriteConfiguration(
11729 const VkPhysicalDeviceProperties& devProps,
11730 const VkPhysicalDeviceMemoryProperties& memProps,
11731 bool dedicatedAllocationExtensionEnabled)
11733 fprintf(m_File,
"Config,Begin\n");
11735 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11736 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11737 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11738 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11739 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11740 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11742 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11743 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11744 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps then memory types, one size/flags pair per entry.
11746 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11747 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11749 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11750 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
11752 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11753 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11755 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11756 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11759 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time VMA debug configuration, so traces from differently-built
// binaries can be distinguished.
11761 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11762 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11763 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11764 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11765 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11766 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11767 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11768 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11769 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11771 fprintf(m_File,
"Config,End\n");
// Fills the common per-call fields: Win32 thread id and seconds elapsed
// since the recorder started (QueryPerformanceCounter relative to
// m_StartCounter, scaled by m_Freq). Flush()'s body is elided here.
11774 void VmaRecorder::GetBasicParams(CallParams& outParams)
11776 outParams.threadId = GetCurrentThreadId();
11778 LARGE_INTEGER counter;
11779 QueryPerformanceCounter(&counter);
11780 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
11783 void VmaRecorder::Flush()
// VmaAllocator_T constructor (head of the initializer list is outside this
// fragment). Visible responsibilities: copy creation parameters, validate
// debug-macro combinations, zero all member tables, fetch physical-device
// properties, apply per-heap size limits, create one block vector and one
// dedicated-allocation list per memory type, and set up optional recording.
11791 #endif // #if VMA_RECORDING_ENABLED 11799 m_hDevice(pCreateInfo->device),
11800 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11801 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11802 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11803 m_PreferredLargeHeapBlockSize(0),
11804 m_PhysicalDevice(pCreateInfo->physicalDevice),
11805 m_CurrentFrameIndex(0),
11806 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11809 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values, so the margin must be
// a multiple of 4 bytes.
11812 if(VMA_DEBUG_DETECT_CORRUPTION)
11815 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated allocation was requested but compiled out.
11820 #if !(VMA_DEDICATED_ALLOCATION) 11823 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
11827 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11828 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11829 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11831 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11832 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
11834 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11836 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
11847 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11848 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Debug constants and reported device limits must be powers of two.
11850 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11851 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11852 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11853 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-supplied per-heap limits, clamping reported heap sizes so the
// rest of the allocator respects them.
11860 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
11862 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
11863 if(limit != VK_WHOLE_SIZE)
11865 m_HeapSizeLimit[heapIndex] = limit;
11866 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
11868 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + dedicated-allocation list per memory type.
11874 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11876 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
11878 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11881 preferredBlockSize,
11884 GetBufferImageGranularity(),
11891 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
11898 VkResult res = VK_SUCCESS;
// Recording requested: only available when compiled with
// VMA_RECORDING_ENABLED, otherwise fail with FEATURE_NOT_PRESENT.
11903 #if VMA_RECORDING_ENABLED 11904 m_pRecorder = vma_new(
this, VmaRecorder)();
11906 if(res != VK_SUCCESS)
11910 m_pRecorder->WriteConfiguration(
11911 m_PhysicalDeviceProperties,
11913 m_UseKhrDedicatedAllocation);
11914 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
11916 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
11917 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: finish the recording trace (if active), verify all custom
// pools were destroyed by the user, then free the per-memory-type
// dedicated-allocation lists and block vectors in reverse order.
11924 VmaAllocator_T::~VmaAllocator_T()
11926 #if VMA_RECORDING_ENABLED 11927 if(m_pRecorder != VMA_NULL)
11929 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
11930 vma_delete(
this, m_pRecorder);
11934 VMA_ASSERT(m_Pools.empty());
11936 for(
size_t i = GetMemoryTypeCount(); i--; )
11938 vma_delete(
this, m_pDedicatedAllocations[i]);
11939 vma_delete(
this, m_pBlockVectors[i]);
// Resolves the Vulkan function-pointer table in three stages:
// 1. statically link the prototypes (when VMA_STATIC_VULKAN_FUNCTIONS == 1),
//    fetching the KHR dedicated-allocation entry points via
//    vkGetDeviceProcAddr when that extension is in use;
// 2. override any pointer the user supplied in pVulkanFunctions;
// 3. assert that every required pointer ended up non-null.
11943 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
11945 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11946 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
11947 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
11948 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
11949 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
11950 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
11951 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
11952 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
11953 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
11954 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
11955 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
11956 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
11957 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
11958 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
11959 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
11960 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
11961 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points have no static prototypes — load dynamically.
11962 #if VMA_DEDICATED_ALLOCATION 11963 if(m_UseKhrDedicatedAllocation)
11965 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
11966 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11967 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11968 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
11970 #endif // #if VMA_DEDICATED_ALLOCATION 11971 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11973 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11974 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11976 if(pVulkanFunctions != VMA_NULL)
11978 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11979 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11980 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11981 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11982 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11983 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11984 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11985 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11986 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11987 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11988 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11989 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11990 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11991 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11992 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11993 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11994 #if VMA_DEDICATED_ALLOCATION 11995 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11996 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every core pointer must be resolved by now.
12000 #undef VMA_COPY_IF_NOT_NULL 12004 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12005 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12006 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12007 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12008 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12009 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12010 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12011 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12012 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12013 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12014 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12015 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12016 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12017 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12018 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12019 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
12020 #if VMA_DEDICATED_ALLOCATION 12021 if(m_UseKhrDedicatedAllocation)
12023 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12024 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the default block size for a memory type: 1/8 of the heap for
// "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise the configured
// preferred large-heap block size.
12029 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12031 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12032 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12033 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12034 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from one specific memory type. Strategy: if dedicated
// memory is preferred (debug override, explicit request, or the size is
// larger than half a block) and no custom pool is involved, try a dedicated
// allocation first; otherwise allocate from the type's block vector, and
// fall back to dedicated memory when the block vector fails.
// NOTE(review): fragmentary listing — several argument lists and the
// NEVER_ALLOCATE checks are elided; comments follow the visible statements.
12037 VkResult VmaAllocator_T::AllocateMemoryOfType(
12039 VkDeviceSize alignment,
12040 bool dedicatedAllocation,
12041 VkBuffer dedicatedBuffer,
12042 VkImage dedicatedImage,
12044 uint32_t memTypeIndex,
12045 VmaSuballocationType suballocType,
12048 VMA_ASSERT(pAllocation != VMA_NULL);
12049 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// (Elided condition) mapping was requested on a non-HOST_VISIBLE type.
12055 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12060 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12061 VMA_ASSERT(blockVector);
12063 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
12064 bool preferDedicatedMemory =
12065 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12066 dedicatedAllocation ||
// Heuristic: an allocation bigger than half a block gets its own memory.
12068 size > preferredBlockSize / 2;
12070 if(preferDedicatedMemory &&
12072 finalCreateInfo.
pool == VK_NULL_HANDLE)
12081 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12085 return AllocateDedicatedMemory(
// Primary path: suballocate from the block vector.
12099 VkResult res = blockVector->Allocate(
12101 m_CurrentFrameIndex.load(),
12107 if(res == VK_SUCCESS)
12115 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block-vector allocation failed, try dedicated memory.
12119 res = AllocateDedicatedMemory(
12125 finalCreateInfo.pUserData,
12129 if(res == VK_SUCCESS)
12132 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12138 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates one allocation backed by its own VkDeviceMemory object, optionally
// chaining VkMemoryDedicatedAllocateInfoKHR when VK_KHR_dedicated_allocation is
// in use, and registers it in m_pDedicatedAllocations[memTypeIndex].
// NOTE(review): some original lines are elided in this capture.
12145 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12147 VmaSuballocationType suballocType,
12148 uint32_t memTypeIndex,
12150 bool isUserDataString,
12152 VkBuffer dedicatedBuffer,
12153 VkImage dedicatedImage,
12156 VMA_ASSERT(pAllocation);
12158 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12159 allocInfo.memoryTypeIndex = memTypeIndex;
12160 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info for exactly one of buffer/image, if the
// extension is enabled.
12162 #if VMA_DEDICATED_ALLOCATION 12163 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12164 if(m_UseKhrDedicatedAllocation)
12166 if(dedicatedBuffer != VK_NULL_HANDLE)
12168 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12169 dedicatedAllocInfo.buffer = dedicatedBuffer;
12170 allocInfo.pNext = &dedicatedAllocInfo;
12172 else if(dedicatedImage != VK_NULL_HANDLE)
12174 dedicatedAllocInfo.image = dedicatedImage;
12175 allocInfo.pNext = &dedicatedAllocInfo;
12178 #endif // #if VMA_DEDICATED_ALLOCATION 12181 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12182 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12185 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistently map the new memory when requested (condition elided here); on
// map failure the freshly allocated memory is released before returning.
12189 void* pMappedData = VMA_NULL;
12192 res = (*m_VulkanFunctions.vkMapMemory)(
12201 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12202 FreeVulkanMemory(memTypeIndex, size, hMemory);
12207 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12208 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12209 (*pAllocation)->SetUserData(
this, pUserData);
12210 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12212 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted per-type list of dedicated allocations (mutex-guarded).
12217 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12218 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12219 VMA_ASSERT(pDedicatedAllocations);
12220 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12223 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is in use, also reports whether the driver requires/prefers a dedicated
// allocation via VkMemoryDedicatedRequirementsKHR; otherwise both flags are
// reported as false. NOTE(review): the hBuffer parameter line is elided here.
12228 void VmaAllocator_T::GetBufferMemoryRequirements(
12230 VkMemoryRequirements& memReq,
12231 bool& requiresDedicatedAllocation,
12232 bool& prefersDedicatedAllocation)
const 12234 #if VMA_DEDICATED_ALLOCATION 12235 if(m_UseKhrDedicatedAllocation)
12237 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12238 memReqInfo.buffer = hBuffer;
12240 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12242 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12243 memReq2.pNext = &memDedicatedReq;
12245 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12247 memReq = memReq2.memoryRequirements;
12248 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12249 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: plain vkGetBufferMemoryRequirements, no dedicated-alloc info.
12252 #endif // #if VMA_DEDICATED_ALLOCATION 12254 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12255 requiresDedicatedAllocation =
false;
12256 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses the
// VK_KHR_get_memory_requirements2 path to obtain dedicated-allocation hints,
// falling back to plain vkGetImageMemoryRequirements with both hints false.
// NOTE(review): the hImage parameter line is elided in this capture.
12260 void VmaAllocator_T::GetImageMemoryRequirements(
12262 VkMemoryRequirements& memReq,
12263 bool& requiresDedicatedAllocation,
12264 bool& prefersDedicatedAllocation)
const 12266 #if VMA_DEDICATED_ALLOCATION 12267 if(m_UseKhrDedicatedAllocation)
12269 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12270 memReqInfo.image = hImage;
12272 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12274 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12275 memReq2.pNext = &memDedicatedReq;
12277 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12279 memReq = memReq2.memoryRequirements;
12280 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12281 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path when the extension is unavailable or compiled out.
12284 #endif // #if VMA_DEDICATED_ALLOCATION 12286 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12287 requiresDedicatedAllocation =
false;
12288 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates flag combinations, routes custom-pool
// requests to the pool's block vector, and otherwise iterates candidate memory
// types (best first), retrying with the failed type masked out of
// memoryTypeBits. NOTE(review): many original lines are elided in this capture.
12292 VkResult VmaAllocator_T::AllocateMemory(
12293 const VkMemoryRequirements& vkMemReq,
12294 bool requiresDedicatedAllocation,
12295 bool prefersDedicatedAllocation,
12296 VkBuffer dedicatedBuffer,
12297 VkImage dedicatedImage,
12299 VmaSuballocationType suballocType,
12302 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Reject contradictory create-flag combinations up front.
12307 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12308 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12313 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12314 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Dedicated allocation is incompatible with NEVER_ALLOCATE and custom pools.
12316 if(requiresDedicatedAllocation)
12320 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12321 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12323 if(createInfo.
pool != VK_NULL_HANDLE)
12325 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12326 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12329 if((createInfo.
pool != VK_NULL_HANDLE) &&
12332 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12333 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: allocate directly from the pool's block vector, honoring
// the memory type's minimum alignment.
12336 if(createInfo.
pool != VK_NULL_HANDLE)
12338 const VkDeviceSize alignmentForPool = VMA_MAX(
12339 vkMemReq.alignment,
12340 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12341 return createInfo.
pool->m_BlockVector.Allocate(
12343 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type (selection call elided here), try it,
// then mask it out and retry with the next candidate on failure.
12353 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12354 uint32_t memTypeIndex = UINT32_MAX;
12356 if(res == VK_SUCCESS)
12358 VkDeviceSize alignmentForMemType = VMA_MAX(
12359 vkMemReq.alignment,
12360 GetMemoryTypeMinAlignment(memTypeIndex));
12362 res = AllocateMemoryOfType(
12364 alignmentForMemType,
12365 requiresDedicatedAllocation || prefersDedicatedAllocation,
12373 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set before retrying.
12383 memoryTypeBits &= ~(1u << memTypeIndex);
12386 if(res == VK_SUCCESS)
12388 alignmentForMemType = VMA_MAX(
12389 vkMemReq.alignment,
12390 GetMemoryTypeMinAlignment(memTypeIndex));
12392 res = AllocateMemoryOfType(
12394 alignmentForMemType,
12395 requiresDedicatedAllocation || prefersDedicatedAllocation,
12403 if(res == VK_SUCCESS)
12413 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation: optionally fills it with the "destroyed" debug pattern
// (only if still valid per TouchAllocation), returns it to its owning block
// vector (pool or default) or frees its dedicated memory, then destroys the
// VmaAllocation_T object itself. NOTE(review): some lines (else branch, breaks)
// are elided in this capture.
12424 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12426 VMA_ASSERT(allocation);
12428 if(TouchAllocation(allocation))
12430 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12432 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12435 switch(allocation->GetType())
12437 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12439 VmaBlockVector* pBlockVector = VMA_NULL;
12440 VmaPool hPool = allocation->GetPool();
// A block allocation lives either in a custom pool's block vector or in the
// default per-memory-type block vector.
12441 if(hPool != VK_NULL_HANDLE)
12443 pBlockVector = &hPool->m_BlockVector;
12447 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12448 pBlockVector = m_pBlockVectors[memTypeIndex];
12450 pBlockVector->Free(allocation);
12453 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12454 FreeDedicatedMemory(allocation);
// Finally release user data and the allocation object itself.
12461 allocation->SetUserData(
this, VMA_NULL);
12462 vma_delete(
this, allocation);
// Aggregates statistics into *pStats from three sources: default per-type block
// vectors, custom pools, and dedicated allocations; then post-processes the
// total / per-type / per-heap summaries.
12465 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero-initialize all stat buckets first.
12468 InitStatInfo(pStats->
total);
12469 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12471 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Stats from the default block vectors, one per memory type.
12475 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12477 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12478 VMA_ASSERT(pBlockVector);
12479 pBlockVector->AddStats(pStats);
// Stats from custom pools (m_Pools is guarded by m_PoolsMutex).
12484 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12485 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12487 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Stats from dedicated allocations, folded into total / per-type / per-heap.
12492 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12494 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12495 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12496 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12497 VMA_ASSERT(pDedicatedAllocVector);
12498 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12501 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12502 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12503 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12504 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages etc. for every populated bucket.
12509 VmaPostprocessCalcStatInfo(pStats->
total);
12510 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12511 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12512 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12513 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID; presumably used for vendor-specific
// behavior elsewhere (its use site is not visible in this chunk).
12516 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Legacy (synchronous) defragmentation: registers eligible allocations with
// per-block-vector defragmentators, runs defragmentation over default block
// vectors and custom pools, then tears the defragmentators down.
// NOTE(review): several original lines are elided in this capture.
12518 VkResult VmaAllocator_T::Defragment(
12520 size_t allocationCount,
12521 VkBool32* pAllocationsChanged,
// Optional outputs start zeroed so untouched entries read as "unchanged".
12525 if(pAllocationsChanged != VMA_NULL)
12527 memset(pAllocationsChanged, 0, allocationCount *
sizeof(VkBool32));
12529 if(pDefragmentationStats != VMA_NULL)
12531 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12534 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12536 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12538 const size_t poolCount = m_Pools.size();
// Phase 1: collect candidate allocations. Only HOST_VISIBLE|HOST_COHERENT
// block allocations that are not lost participate.
12541 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12544 VMA_ASSERT(hAlloc);
12545 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
12547 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12548 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12550 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12552 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12554 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12556 const VmaPool hAllocPool = hAlloc->GetPool();
12558 if(hAllocPool != VK_NULL_HANDLE)
// Pools using a non-default algorithm (GetAlgorithm() != 0) are skipped.
12561 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12563 pAllocBlockVector = &hAllocPool->m_BlockVector;
12569 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12572 if(pAllocBlockVector != VMA_NULL)
12574 VmaDefragmentator*
const pDefragmentator =
12575 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12576 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12577 &pAllocationsChanged[allocIndex] : VMA_NULL;
12578 pDefragmentator->AddAllocation(hAlloc, pChanged);
12583 VkResult result = VK_SUCCESS;
// NOTE(review): SIZE_MAX is size_t-wide, but maxBytesToMove is a 64-bit
// VkDeviceSize — on 32-bit targets the default cap is only ~4 GiB. Consider
// VK_WHOLE_SIZE / UINT64_MAX instead; confirm against upstream intent.
12587 VkDeviceSize maxBytesToMove = SIZE_MAX;
12588 uint32_t maxAllocationsToMove = UINT32_MAX;
12589 if(pDefragmentationInfo != VMA_NULL)
// Phase 2: run defragmentation over default block vectors (HOST_VISIBLE types
// only), then over custom pools, stopping on the first failure.
12596 for(uint32_t memTypeIndex = 0;
12597 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12601 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12603 result = m_pBlockVectors[memTypeIndex]->Defragment(
12604 pDefragmentationStats,
12606 maxAllocationsToMove);
12611 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12613 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12614 pDefragmentationStats,
12616 maxAllocationsToMove);
// Phase 3: destroy defragmentators in reverse order of creation.
12622 for(
size_t poolIndex = poolCount; poolIndex--; )
12624 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12628 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12630 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12632 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (its signature is elided in this
// capture). Fills *pAllocationInfo; for allocations that can become lost it
// also "touches" them by CAS-updating the last-use frame index, reporting a
// null-like info block when the allocation is already lost.
12641 if(hAllocation->CanBecomeLost())
12647 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12648 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/userData but zero offset (memory is gone).
12651 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12655 pAllocationInfo->
offset = 0;
12656 pAllocationInfo->
size = hAllocation->GetSize();
12658 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info as-is.
12661 else if(localLastUseFrameIndex == localCurrFrameIndex)
12663 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12664 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12665 pAllocationInfo->
offset = hAllocation->GetOffset();
12666 pAllocationInfo->
size = hAllocation->GetSize();
12668 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index; retry loop structure is
// partially elided here.
12673 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12675 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: in stats-enabled builds, still bump the last-use frame
// index (debug bookkeeping), then report full info unconditionally.
12682 #if VMA_STATS_STRING_ENABLED 12683 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12684 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12687 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12688 if(localLastUseFrameIndex == localCurrFrameIndex)
12694 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12696 localLastUseFrameIndex = localCurrFrameIndex;
12702 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12703 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12704 pAllocationInfo->
offset = hAllocation->GetOffset();
12705 pAllocationInfo->
size = hAllocation->GetSize();
12706 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12707 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. For lost-capable
// allocations, returns false if the allocation is already lost; otherwise
// CAS-advances its last-use frame index (loop structure partially elided).
// For ordinary allocations the same bookkeeping runs in stats-enabled builds
// and the function presumably returns true (return lines elided).
12711 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12714 if(hAllocation->CanBecomeLost())
12716 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12717 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12720 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12724 else if(localLastUseFrameIndex == localCurrFrameIndex)
12730 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12732 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: debug-only frame-index bookkeeping.
12739 #if VMA_STATS_STRING_ENABLED 12740 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12741 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12744 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12745 if(localLastUseFrameIndex == localCurrFrameIndex)
12751 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12753 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and create-info validation are
// elided in this capture). Builds a VmaPool_T with a preferred block size
// derived from its memory type's heap, pre-creates the minimum block count,
// then registers the pool under m_PoolsMutex with a fresh id.
12765 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
12775 return VK_ERROR_INITIALIZATION_FAILED;
12778 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12780 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Roll back pool creation if the minimum blocks cannot be allocated.
12782 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12783 if(res != VK_SUCCESS)
12785 vma_delete(
this, *pPool);
12792 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12793 (*pPool)->SetId(m_NextPoolId++);
12794 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
12800 void VmaAllocator_T::DestroyPool(
VmaPool pool)
12804 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12805 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
12806 VMA_ASSERT(success &&
"Pool not found in Allocator.");
12809 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (signature elided in this capture):
// forwards the query to the pool's block vector.
12814 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index; it is read via
// m_CurrentFrameIndex.load() by the allocation / lost-allocation paths.
12817 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
12819 m_CurrentFrameIndex.store(frameIndex);
12822 void VmaAllocator_T::MakePoolAllocationsLost(
12824 size_t* pLostAllocationCount)
12826 hPool->m_BlockVector.MakePoolAllocationsLost(
12827 m_CurrentFrameIndex.load(),
12828 pLostAllocationCount);
// Runs corruption detection (margin-validation) over a single custom pool by
// delegating to its block vector.
12831 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
12833 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption detection over every default block vector and custom pool
// whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once any vector
// actually supports the check; other result handling is elided in this capture.
12836 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
12838 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
12841 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12843 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
12845 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12846 VMA_ASSERT(pBlockVector);
12847 VkResult localRes = pBlockVector->CheckCorruption();
12850 case VK_ERROR_FEATURE_NOT_PRESENT:
12853 finalRes = VK_SUCCESS;
// Custom pools, filtered by their memory type against memoryTypeBits.
12863 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12864 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12866 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
12868 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
12871 case VK_ERROR_FEATURE_NOT_PRESENT:
12874 finalRes = VK_SUCCESS;
12886 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
12888 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
12889 (*pAllocation)->InitLost();
// Central wrapper around vkAllocateMemory. When the target heap has a
// user-imposed size limit (m_HeapSizeLimit != VK_WHOLE_SIZE), the remaining
// budget is checked and decremented under m_HeapSizeLimitMutex; over-budget
// requests fail with VK_ERROR_OUT_OF_DEVICE_MEMORY without calling the driver.
// On success the user's pfnAllocate callback is invoked.
// NOTE(review): the declaration of `res` is elided in this capture.
12892 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
12894 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
12897 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12899 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12900 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
12902 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12903 if(res == VK_SUCCESS)
// Charge the allocation against the heap budget only once it succeeded.
12905 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
12910 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: call straight through to the driver.
12915 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12918 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
12920 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
12926 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
12928 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
12930 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
12933 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
12935 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
12936 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12938 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12939 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be mapped.
// Block allocations map the whole owning block (reference-counted) and offset
// the returned pointer; dedicated allocations map their own VkDeviceMemory.
// NOTE(review): some lines (default case, returns) are elided in this capture.
12943 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
12945 if(hAllocation->CanBecomeLost())
12947 return VK_ERROR_MEMORY_MAP_FAILED;
12950 switch(hAllocation->GetType())
12952 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12954 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12955 char *pBytes = VMA_NULL;
12956 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
12957 if(res == VK_SUCCESS)
// The block is mapped at offset 0; adjust for this allocation's offset and
// count the map on the allocation itself.
12959 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
12960 hAllocation->BlockAllocMap();
12964 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12965 return hAllocation->DedicatedAllocMap(
this, ppData);
12968 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature elided in this capture). Mirrors
// Map: decrements the allocation's map count and unmaps the owning block for
// block allocations, or unmaps the dedicated memory directly.
12974 switch(hAllocation->GetType())
12976 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12978 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12979 hAllocation->BlockAllocUnmap();
12980 pBlock->Unmap(
this, 1);
12983 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12984 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory: directly via vkBindBufferMemory
// for dedicated allocations, or through the owning block (which applies the
// allocation's offset) for block allocations.
12991 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12993 VkResult res = VK_SUCCESS;
12994 switch(hAllocation->GetType())
12996 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12997 res = GetVulkanFunctions().vkBindBufferMemory(
13000 hAllocation->GetMemory(),
13003 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13005 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13006 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13007 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: vkBindImageMemory for dedicated
// allocations, block-mediated bind (offset applied by the block) otherwise.
13016 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13018 VkResult res = VK_SUCCESS;
13019 switch(hAllocation->GetType())
13021 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13022 res = GetVulkanFunctions().vkBindImageMemory(
13025 hAllocation->GetMemory(),
13028 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13030 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13031 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13032 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent memory
// types. The range is expanded to nonCoherentAtomSize boundaries as Vulkan
// requires; coherent memory types and size==0 are no-ops.
13041 void VmaAllocator_T::FlushOrInvalidateAllocation(
13043 VkDeviceSize offset, VkDeviceSize size,
13044 VMA_CACHE_OPERATION op)
13046 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13047 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13049 const VkDeviceSize allocationSize = hAllocation->GetSize();
13050 VMA_ASSERT(offset <= allocationSize);
13052 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13054 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13055 memRange.memory = hAllocation->GetMemory();
13057 switch(hAllocation->GetType())
// Dedicated allocation: offsets are relative to its own VkDeviceMemory.
13059 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13060 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13061 if(size == VK_WHOLE_SIZE)
13063 memRange.size = allocationSize - memRange.offset;
13067 VMA_ASSERT(offset + size <= allocationSize);
13068 memRange.size = VMA_MIN(
13069 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13070 allocationSize - memRange.offset);
// Block allocation: compute the aligned range within the allocation, then
// translate by the allocation's offset inside the block and clamp to the
// block size.
13074 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13077 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13078 if(size == VK_WHOLE_SIZE)
13080 size = allocationSize - offset;
13084 VMA_ASSERT(offset + size <= allocationSize);
13086 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13089 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13090 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13091 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13092 memRange.offset += allocationOffset;
13093 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch to the requested cache operation.
13104 case VMA_CACHE_FLUSH:
13105 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13107 case VMA_CACHE_INVALIDATE:
13108 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the sorted per-type registry
// under its mutex, then releases the VkDeviceMemory via FreeVulkanMemory
// (unmap handling between these steps is elided in this capture).
13117 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13119 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13121 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13123 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13124 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13125 VMA_ASSERT(pDedicatedAllocations);
13126 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13127 VMA_ASSERT(success);
13130 VkDeviceMemory hMemory = allocation->GetMemory();
13142 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13144 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills an allocation's memory with a byte pattern (created /
// destroyed markers). Only acts when VMA_DEBUG_INITIALIZE_ALLOCATIONS is on,
// the allocation cannot become lost, and its memory type is HOST_VISIBLE;
// maps, memsets, flushes, and unmaps — asserting if the map fails.
13147 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13149 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13150 !hAllocation->CanBecomeLost() &&
13151 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13153 void* pData = VMA_NULL;
13154 VkResult res = Map(hAllocation, &pData);
13155 if(res == VK_SUCCESS)
13157 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
13158 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13159 Unmap(hAllocation);
13163 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Emits the detailed JSON map used by vmaBuildStatsString: dedicated
// allocations per memory type, then the default block vectors
// ("DefaultPools"), then custom pools keyed by pool id. Section objects are
// opened lazily on the first non-empty entry.
13168 #if VMA_STATS_STRING_ENABLED 13170 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13172 bool dedicatedAllocationsStarted =
false;
13173 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13175 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13176 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13177 VMA_ASSERT(pDedicatedAllocVector);
13178 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on first use only.
13180 if(dedicatedAllocationsStarted ==
false)
13182 dedicatedAllocationsStarted =
true;
13183 json.WriteString(
"DedicatedAllocations");
13184 json.BeginObject();
13187 json.BeginString(
"Type ");
13188 json.ContinueString(memTypeIndex);
13193 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13195 json.BeginObject(
true);
13197 hAlloc->PrintParameters(json);
13204 if(dedicatedAllocationsStarted)
// Default block vectors, one "Type N" entry per non-empty memory type.
13210 bool allocationsStarted =
false;
13211 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13213 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13215 if(allocationsStarted ==
false)
13217 allocationsStarted =
true;
13218 json.WriteString(
"DefaultPools");
13219 json.BeginObject();
13222 json.BeginString(
"Type ");
13223 json.ContinueString(memTypeIndex);
13226 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13229 if(allocationsStarted)
// Custom pools, keyed by their numeric id (m_Pools guarded by m_PoolsMutex).
13237 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13238 const size_t poolCount = m_Pools.size();
13241 json.WriteString(
"Pools");
13242 json.BeginObject();
13243 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13245 json.BeginString();
13246 json.ContinueString(m_Pools[poolIndex]->GetId());
13249 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Body of vmaCreateAllocator (signature and the vma_new of VmaAllocator_T are
// elided in this capture): validates arguments and runs allocator Init.
13256 #endif // #if VMA_STATS_STRING_ENABLED 13265 VMA_ASSERT(pCreateInfo && pAllocator);
13266 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13268 return (*pAllocator)->Init(pCreateInfo);
// Body of vmaDestroyAllocator (signature elided). Copies the allocation
// callbacks out of the allocator before deleting it, since the allocator's
// own storage is freed through those callbacks.
13274 if(allocator != VK_NULL_HANDLE)
13276 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13277 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13278 vma_delete(&allocationCallbacks, allocator);
// Returns a pointer to the allocator's cached VkPhysicalDeviceProperties.
13284 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13286 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13287 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// Returns a pointer to the allocator's cached VkPhysicalDeviceMemoryProperties.
13292 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13294 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13295 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Looks up the property flags of one memory type from the cached memory
// properties; asserts the index is in range.
13300 uint32_t memoryTypeIndex,
13301 VkMemoryPropertyFlags* pFlags)
13303 VMA_ASSERT(allocator && pFlags);
13304 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13305 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Forwards the application's frame index to the allocator; the sentinel
// VMA_FRAME_INDEX_LOST is rejected because it marks lost allocations.
13310 uint32_t frameIndex)
13312 VMA_ASSERT(allocator);
13313 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13315 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13317 allocator->SetCurrentFrameIndex(frameIndex);
// Thin public wrapper over VmaAllocator_T::CalculateStats.
13324 VMA_ASSERT(allocator && pStats);
13325 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13326 allocator->CalculateStats(pStats);
// Builds a JSON stats string: overall totals, then a nested object per memory
// heap with its memory types (flags + stats), optionally followed by the
// detailed allocation map. The result is heap-allocated through the
// allocator's callbacks and must be released with vmaFreeStatsString.
13329 #if VMA_STATS_STRING_ENABLED 13333 char** ppStatsString,
13334 VkBool32 detailedMap)
13336 VMA_ASSERT(allocator && ppStatsString);
13337 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13339 VmaStringBuilder sb(allocator);
13341 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13342 json.BeginObject();
13345 allocator->CalculateStats(&stats);
13347 json.WriteString(
"Total");
13348 VmaPrintStatInfo(json, stats.
total);
// One object per memory heap: size, heap flags, stats, and member types.
13350 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13352 json.BeginString(
"Heap ");
13353 json.ContinueString(heapIndex);
13355 json.BeginObject();
13357 json.WriteString(
"Size");
13358 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13360 json.WriteString(
"Flags");
13361 json.BeginArray(
true);
13362 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13364 json.WriteString(
"DEVICE_LOCAL");
13370 json.WriteString(
"Stats");
13371 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested: each memory type belonging to this heap, with its property flags.
13374 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13376 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13378 json.BeginString(
"Type ");
13379 json.ContinueString(typeIndex);
13382 json.BeginObject();
13384 json.WriteString(
"Flags");
13385 json.BeginArray(
true);
13386 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13387 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13389 json.WriteString(
"DEVICE_LOCAL");
13391 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13393 json.WriteString(
"HOST_VISIBLE");
13395 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13397 json.WriteString(
"HOST_COHERENT");
13399 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13401 json.WriteString(
"HOST_CACHED");
13403 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13405 json.WriteString(
"LAZILY_ALLOCATED");
13411 json.WriteString(
"Stats");
13412 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13421 if(detailedMap == VK_TRUE)
13423 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a NUL-terminated string owned by the caller.
13429 const size_t len = sb.GetLength();
13430 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13433 memcpy(pChars, sb.GetData(), len);
13435 pChars[len] =
'\0';
13436 *ppStatsString = pChars;
13441 char* pStatsString)
13443 if(pStatsString != VMA_NULL)
13445 VMA_ASSERT(allocator);
13446 size_t len = strlen(pStatsString);
13447 vma_delete_array(allocator, pStatsString, len + 1);
13451 #endif // #if VMA_STATS_STRING_ENABLED 13458 uint32_t memoryTypeBits,
13460 uint32_t* pMemoryTypeIndex)
13462 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13463 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13464 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13471 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13472 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13477 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13481 switch(pAllocationCreateInfo->
usage)
13486 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13488 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13492 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13495 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13496 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13498 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13502 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13503 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13509 *pMemoryTypeIndex = UINT32_MAX;
13510 uint32_t minCost = UINT32_MAX;
13511 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13512 memTypeIndex < allocator->GetMemoryTypeCount();
13513 ++memTypeIndex, memTypeBit <<= 1)
13516 if((memTypeBit & memoryTypeBits) != 0)
13518 const VkMemoryPropertyFlags currFlags =
13519 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13521 if((requiredFlags & ~currFlags) == 0)
13524 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13526 if(currCost < minCost)
13528 *pMemoryTypeIndex = memTypeIndex;
13533 minCost = currCost;
13538 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13543 const VkBufferCreateInfo* pBufferCreateInfo,
13545 uint32_t* pMemoryTypeIndex)
13547 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13548 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13549 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13550 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13552 const VkDevice hDev = allocator->m_hDevice;
13553 VkBuffer hBuffer = VK_NULL_HANDLE;
13554 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13555 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13556 if(res == VK_SUCCESS)
13558 VkMemoryRequirements memReq = {};
13559 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13560 hDev, hBuffer, &memReq);
13564 memReq.memoryTypeBits,
13565 pAllocationCreateInfo,
13568 allocator->GetVulkanFunctions().vkDestroyBuffer(
13569 hDev, hBuffer, allocator->GetAllocationCallbacks());
13576 const VkImageCreateInfo* pImageCreateInfo,
13578 uint32_t* pMemoryTypeIndex)
13580 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13581 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13582 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13583 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13585 const VkDevice hDev = allocator->m_hDevice;
13586 VkImage hImage = VK_NULL_HANDLE;
13587 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13588 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13589 if(res == VK_SUCCESS)
13591 VkMemoryRequirements memReq = {};
13592 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13593 hDev, hImage, &memReq);
13597 memReq.memoryTypeBits,
13598 pAllocationCreateInfo,
13601 allocator->GetVulkanFunctions().vkDestroyImage(
13602 hDev, hImage, allocator->GetAllocationCallbacks());
13612 VMA_ASSERT(allocator && pCreateInfo && pPool);
13614 VMA_DEBUG_LOG(
"vmaCreatePool");
13616 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13618 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13620 #if VMA_RECORDING_ENABLED 13621 if(allocator->GetRecorder() != VMA_NULL)
13623 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13634 VMA_ASSERT(allocator);
13636 if(pool == VK_NULL_HANDLE)
13641 VMA_DEBUG_LOG(
"vmaDestroyPool");
13643 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13645 #if VMA_RECORDING_ENABLED 13646 if(allocator->GetRecorder() != VMA_NULL)
13648 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13652 allocator->DestroyPool(pool);
13660 VMA_ASSERT(allocator && pool && pPoolStats);
13662 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13664 allocator->GetPoolStats(pool, pPoolStats);
13670 size_t* pLostAllocationCount)
13672 VMA_ASSERT(allocator && pool);
13674 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13676 #if VMA_RECORDING_ENABLED 13677 if(allocator->GetRecorder() != VMA_NULL)
13679 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13683 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13688 VMA_ASSERT(allocator && pool);
13690 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13692 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13694 return allocator->CheckPoolCorruption(pool);
13699 const VkMemoryRequirements* pVkMemoryRequirements,
13704 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13706 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13708 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13710 VkResult result = allocator->AllocateMemory(
13711 *pVkMemoryRequirements,
13717 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13720 #if VMA_RECORDING_ENABLED 13721 if(allocator->GetRecorder() != VMA_NULL)
13723 allocator->GetRecorder()->RecordAllocateMemory(
13724 allocator->GetCurrentFrameIndex(),
13725 *pVkMemoryRequirements,
13731 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13733 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13746 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13748 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13750 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13752 VkMemoryRequirements vkMemReq = {};
13753 bool requiresDedicatedAllocation =
false;
13754 bool prefersDedicatedAllocation =
false;
13755 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
13756 requiresDedicatedAllocation,
13757 prefersDedicatedAllocation);
13759 VkResult result = allocator->AllocateMemory(
13761 requiresDedicatedAllocation,
13762 prefersDedicatedAllocation,
13766 VMA_SUBALLOCATION_TYPE_BUFFER,
13769 #if VMA_RECORDING_ENABLED 13770 if(allocator->GetRecorder() != VMA_NULL)
13772 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
13773 allocator->GetCurrentFrameIndex(),
13775 requiresDedicatedAllocation,
13776 prefersDedicatedAllocation,
13782 if(pAllocationInfo && result == VK_SUCCESS)
13784 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13797 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13799 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
13801 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13803 VkMemoryRequirements vkMemReq = {};
13804 bool requiresDedicatedAllocation =
false;
13805 bool prefersDedicatedAllocation =
false;
13806 allocator->GetImageMemoryRequirements(image, vkMemReq,
13807 requiresDedicatedAllocation, prefersDedicatedAllocation);
13809 VkResult result = allocator->AllocateMemory(
13811 requiresDedicatedAllocation,
13812 prefersDedicatedAllocation,
13816 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
13819 #if VMA_RECORDING_ENABLED 13820 if(allocator->GetRecorder() != VMA_NULL)
13822 allocator->GetRecorder()->RecordAllocateMemoryForImage(
13823 allocator->GetCurrentFrameIndex(),
13825 requiresDedicatedAllocation,
13826 prefersDedicatedAllocation,
13832 if(pAllocationInfo && result == VK_SUCCESS)
13834 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13844 VMA_ASSERT(allocator);
13846 if(allocation == VK_NULL_HANDLE)
13851 VMA_DEBUG_LOG(
"vmaFreeMemory");
13853 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13855 #if VMA_RECORDING_ENABLED 13856 if(allocator->GetRecorder() != VMA_NULL)
13858 allocator->GetRecorder()->RecordFreeMemory(
13859 allocator->GetCurrentFrameIndex(),
13864 allocator->FreeMemory(allocation);
13872 VMA_ASSERT(allocator && allocation && pAllocationInfo);
13874 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13876 #if VMA_RECORDING_ENABLED 13877 if(allocator->GetRecorder() != VMA_NULL)
13879 allocator->GetRecorder()->RecordGetAllocationInfo(
13880 allocator->GetCurrentFrameIndex(),
13885 allocator->GetAllocationInfo(allocation, pAllocationInfo);
13892 VMA_ASSERT(allocator && allocation);
13894 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13896 #if VMA_RECORDING_ENABLED 13897 if(allocator->GetRecorder() != VMA_NULL)
13899 allocator->GetRecorder()->RecordTouchAllocation(
13900 allocator->GetCurrentFrameIndex(),
13905 return allocator->TouchAllocation(allocation);
13913 VMA_ASSERT(allocator && allocation);
13915 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13917 allocation->SetUserData(allocator, pUserData);
13919 #if VMA_RECORDING_ENABLED 13920 if(allocator->GetRecorder() != VMA_NULL)
13922 allocator->GetRecorder()->RecordSetAllocationUserData(
13923 allocator->GetCurrentFrameIndex(),
13934 VMA_ASSERT(allocator && pAllocation);
13936 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
13938 allocator->CreateLostAllocation(pAllocation);
13940 #if VMA_RECORDING_ENABLED 13941 if(allocator->GetRecorder() != VMA_NULL)
13943 allocator->GetRecorder()->RecordCreateLostAllocation(
13944 allocator->GetCurrentFrameIndex(),
13955 VMA_ASSERT(allocator && allocation && ppData);
13957 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13959 VkResult res = allocator->Map(allocation, ppData);
13961 #if VMA_RECORDING_ENABLED 13962 if(allocator->GetRecorder() != VMA_NULL)
13964 allocator->GetRecorder()->RecordMapMemory(
13965 allocator->GetCurrentFrameIndex(),
13977 VMA_ASSERT(allocator && allocation);
13979 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13981 #if VMA_RECORDING_ENABLED 13982 if(allocator->GetRecorder() != VMA_NULL)
13984 allocator->GetRecorder()->RecordUnmapMemory(
13985 allocator->GetCurrentFrameIndex(),
13990 allocator->Unmap(allocation);
13995 VMA_ASSERT(allocator && allocation);
13997 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13999 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14001 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14003 #if VMA_RECORDING_ENABLED 14004 if(allocator->GetRecorder() != VMA_NULL)
14006 allocator->GetRecorder()->RecordFlushAllocation(
14007 allocator->GetCurrentFrameIndex(),
14008 allocation, offset, size);
14015 VMA_ASSERT(allocator && allocation);
14017 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14019 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14021 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14023 #if VMA_RECORDING_ENABLED 14024 if(allocator->GetRecorder() != VMA_NULL)
14026 allocator->GetRecorder()->RecordInvalidateAllocation(
14027 allocator->GetCurrentFrameIndex(),
14028 allocation, offset, size);
14035 VMA_ASSERT(allocator);
14037 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14039 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14041 return allocator->CheckCorruption(memoryTypeBits);
14047 size_t allocationCount,
14048 VkBool32* pAllocationsChanged,
14052 VMA_ASSERT(allocator && pAllocations);
14054 VMA_DEBUG_LOG(
"vmaDefragment");
14056 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14058 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14066 VMA_ASSERT(allocator && allocation && buffer);
14068 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14070 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14072 return allocator->BindBufferMemory(allocation, buffer);
14080 VMA_ASSERT(allocator && allocation && image);
14082 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14084 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14086 return allocator->BindImageMemory(allocation, image);
14091 const VkBufferCreateInfo* pBufferCreateInfo,
14097 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14099 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14101 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14103 *pBuffer = VK_NULL_HANDLE;
14104 *pAllocation = VK_NULL_HANDLE;
14107 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14108 allocator->m_hDevice,
14110 allocator->GetAllocationCallbacks(),
14115 VkMemoryRequirements vkMemReq = {};
14116 bool requiresDedicatedAllocation =
false;
14117 bool prefersDedicatedAllocation =
false;
14118 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14119 requiresDedicatedAllocation, prefersDedicatedAllocation);
14123 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14125 VMA_ASSERT(vkMemReq.alignment %
14126 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14128 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14130 VMA_ASSERT(vkMemReq.alignment %
14131 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14133 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14135 VMA_ASSERT(vkMemReq.alignment %
14136 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14140 res = allocator->AllocateMemory(
14142 requiresDedicatedAllocation,
14143 prefersDedicatedAllocation,
14146 *pAllocationCreateInfo,
14147 VMA_SUBALLOCATION_TYPE_BUFFER,
14150 #if VMA_RECORDING_ENABLED 14151 if(allocator->GetRecorder() != VMA_NULL)
14153 allocator->GetRecorder()->RecordCreateBuffer(
14154 allocator->GetCurrentFrameIndex(),
14155 *pBufferCreateInfo,
14156 *pAllocationCreateInfo,
14164 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14168 #if VMA_STATS_STRING_ENABLED 14169 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14171 if(pAllocationInfo != VMA_NULL)
14173 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14178 allocator->FreeMemory(*pAllocation);
14179 *pAllocation = VK_NULL_HANDLE;
14180 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14181 *pBuffer = VK_NULL_HANDLE;
14184 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14185 *pBuffer = VK_NULL_HANDLE;
14196 VMA_ASSERT(allocator);
14198 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14203 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14205 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14207 #if VMA_RECORDING_ENABLED 14208 if(allocator->GetRecorder() != VMA_NULL)
14210 allocator->GetRecorder()->RecordDestroyBuffer(
14211 allocator->GetCurrentFrameIndex(),
14216 if(buffer != VK_NULL_HANDLE)
14218 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14221 if(allocation != VK_NULL_HANDLE)
14223 allocator->FreeMemory(allocation);
14229 const VkImageCreateInfo* pImageCreateInfo,
14235 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14237 VMA_DEBUG_LOG(
"vmaCreateImage");
14239 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14241 *pImage = VK_NULL_HANDLE;
14242 *pAllocation = VK_NULL_HANDLE;
14245 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14246 allocator->m_hDevice,
14248 allocator->GetAllocationCallbacks(),
14252 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14253 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14254 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14257 VkMemoryRequirements vkMemReq = {};
14258 bool requiresDedicatedAllocation =
false;
14259 bool prefersDedicatedAllocation =
false;
14260 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14261 requiresDedicatedAllocation, prefersDedicatedAllocation);
14263 res = allocator->AllocateMemory(
14265 requiresDedicatedAllocation,
14266 prefersDedicatedAllocation,
14269 *pAllocationCreateInfo,
14273 #if VMA_RECORDING_ENABLED 14274 if(allocator->GetRecorder() != VMA_NULL)
14276 allocator->GetRecorder()->RecordCreateImage(
14277 allocator->GetCurrentFrameIndex(),
14279 *pAllocationCreateInfo,
14287 res = allocator->BindImageMemory(*pAllocation, *pImage);
14291 #if VMA_STATS_STRING_ENABLED 14292 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14294 if(pAllocationInfo != VMA_NULL)
14296 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14301 allocator->FreeMemory(*pAllocation);
14302 *pAllocation = VK_NULL_HANDLE;
14303 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14304 *pImage = VK_NULL_HANDLE;
14307 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14308 *pImage = VK_NULL_HANDLE;
14319 VMA_ASSERT(allocator);
14321 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14326 VMA_DEBUG_LOG(
"vmaDestroyImage");
14328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14330 #if VMA_RECORDING_ENABLED 14331 if(allocator->GetRecorder() != VMA_NULL)
14333 allocator->GetRecorder()->RecordDestroyImage(
14334 allocator->GetCurrentFrameIndex(),
14339 if(image != VK_NULL_HANDLE)
14341 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14343 if(allocation != VK_NULL_HANDLE)
14345 allocator->FreeMemory(allocation);
14349 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1575
+
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1876
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
-
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1628
+
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1632
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
-
Definition: vk_mem_alloc.h:1602
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2194
-
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1583
+
Definition: vk_mem_alloc.h:1606
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2198
+
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1587
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
-
Definition: vk_mem_alloc.h:1829
-
Definition: vk_mem_alloc.h:1932
-
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1575
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2294
-
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1625
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2539
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2083
-
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1472
+
Definition: vk_mem_alloc.h:1833
+
Definition: vk_mem_alloc.h:1936
+
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1579
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2298
+
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1629
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2543
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2087
+
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1476
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2175
-
Definition: vk_mem_alloc.h:1909
-
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1564
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1982
-
Definition: vk_mem_alloc.h:1856
-
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1637
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2111
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2179
+
Definition: vk_mem_alloc.h:1913
+
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1568
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1986
+
Definition: vk_mem_alloc.h:1860
+
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1641
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2115
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
-
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1690
-
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1622
+
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1694
+
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1626
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
-
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1860
+
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1864
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
-
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1762
-
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1580
-
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1761
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2543
+
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1766
+
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1584
+
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1765
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2547
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1654
-
VmaStatInfo total
Definition: vk_mem_alloc.h:1771
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2551
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1966
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2534
-
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1581
-
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1506
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1658
+
VmaStatInfo total
Definition: vk_mem_alloc.h:1775
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2555
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1970
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2538
+
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1585
+
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1510
Represents main object of this library initialized.
-
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1631
+
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1635
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2125
-
Definition: vk_mem_alloc.h:2119
-
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1697
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2304
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2129
+
Definition: vk_mem_alloc.h:2123
+
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1701
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2308
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
-
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1576
-
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1600
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2003
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2145
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2181
+
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1580
+
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1604
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2007
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2149
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2185
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
-
Definition: vk_mem_alloc.h:1562
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2128
+
Definition: vk_mem_alloc.h:1566
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2132
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
-
VmaMemoryUsage
Definition: vk_mem_alloc.h:1807
+
VmaMemoryUsage
Definition: vk_mem_alloc.h:1811
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2529
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2533
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2547
-
Definition: vk_mem_alloc.h:1846
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1990
-
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1579
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2551
+
Definition: vk_mem_alloc.h:1850
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1994
+
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1583
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
-
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1767
-
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1512
+
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1771
+
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1516
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:1950
+
Definition: vk_mem_alloc.h:1954
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
-
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1533
+
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1537
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
-
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1604
-
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1538
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2549
+
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1608
+
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1542
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2553
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1977
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2191
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1981
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2195
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
-
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1572
-
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1750
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2140
-
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1525
-
Definition: vk_mem_alloc.h:2115
+
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1576
+
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1754
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2144
+
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1529
+
Definition: vk_mem_alloc.h:2119
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1916
-
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1763
-
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1529
-
Definition: vk_mem_alloc.h:1940
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2131
-
Definition: vk_mem_alloc.h:1855
-
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1578
+
Definition: vk_mem_alloc.h:1920
+
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1767
+
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1533
+
Definition: vk_mem_alloc.h:1944
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2135
+
Definition: vk_mem_alloc.h:1859
+
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1582
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1972
-
Definition: vk_mem_alloc.h:1963
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1976
+
Definition: vk_mem_alloc.h:1967
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
-
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1753
-
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1574
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2153
-
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1640
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2184
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1961
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1996
+
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1757
+
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1578
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2157
+
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1644
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2188
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1965
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2000
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
-
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1678
-
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1769
-
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1896
-
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1762
+
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1682
+
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1773
+
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1900
+
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1766
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
-
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1585
-
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1610
-
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1527
-
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1584
+
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1589
+
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1614
+
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1531
+
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1588
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2167
-
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1577
-
Definition: vk_mem_alloc.h:1927
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2171
+
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1581
+
Definition: vk_mem_alloc.h:1931
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
-
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1618
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2318
-
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1634
-
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1762
-
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1759
+
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1622
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2322
+
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1638
+
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1766
+
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1763
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2172
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2176
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
-
Definition: vk_mem_alloc.h:1936
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2299
-
Definition: vk_mem_alloc.h:1947
-
Definition: vk_mem_alloc.h:1959
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2545
-
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1570
+
Definition: vk_mem_alloc.h:1940
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2303
+
Definition: vk_mem_alloc.h:1951
+
Definition: vk_mem_alloc.h:1963
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2549
+
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1574
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
-
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1757
-
Definition: vk_mem_alloc.h:1812
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2121
+
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1761
+
Definition: vk_mem_alloc.h:1816
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2125
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
-
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1607
-
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1755
-
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1582
-
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1586
-
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1883
-
Definition: vk_mem_alloc.h:1954
-
Definition: vk_mem_alloc.h:1839
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2313
+
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1611
+
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1759
+
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1586
+
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1590
+
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1887
+
Definition: vk_mem_alloc.h:1958
+
Definition: vk_mem_alloc.h:1843
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2317
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
-
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1560
+
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1564
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
-
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1573
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2100
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2280
+
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1577
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2104
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2284
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1944
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2065
-
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1763
+
Definition: vk_mem_alloc.h:1948
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2069
+
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1767
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
-
Definition: vk_mem_alloc.h:1922
-
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1594
-
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1770
+
Definition: vk_mem_alloc.h:1926
+
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1598
+
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1774
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2178
-
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1763
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2182
+
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1767
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2285
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2289