23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1464 #ifndef VMA_RECORDING_ENABLED 1466 #define VMA_RECORDING_ENABLED 1 1468 #define VMA_RECORDING_ENABLED 0 1473 #define NOMINMAX // For windows.h 1476 #include <vulkan/vulkan.h> 1478 #if VMA_RECORDING_ENABLED 1479 #include <windows.h> 1482 #if !defined(VMA_DEDICATED_ALLOCATION) 1483 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1484 #define VMA_DEDICATED_ALLOCATION 1 1486 #define VMA_DEDICATED_ALLOCATION 0 1504 uint32_t memoryType,
1505 VkDeviceMemory memory,
1510 uint32_t memoryType,
1511 VkDeviceMemory memory,
1583 #if VMA_DEDICATED_ALLOCATION 1584 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1585 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1711 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1719 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1729 uint32_t memoryTypeIndex,
1730 VkMemoryPropertyFlags* pFlags);
1742 uint32_t frameIndex);
1775 #define VMA_STATS_STRING_ENABLED 1 1777 #if VMA_STATS_STRING_ENABLED 1784 char** ppStatsString,
1785 VkBool32 detailedMap);
1789 char* pStatsString);
1791 #endif // #if VMA_STATS_STRING_ENABLED 2020 uint32_t memoryTypeBits,
2022 uint32_t* pMemoryTypeIndex);
2038 const VkBufferCreateInfo* pBufferCreateInfo,
2040 uint32_t* pMemoryTypeIndex);
2056 const VkImageCreateInfo* pImageCreateInfo,
2058 uint32_t* pMemoryTypeIndex);
2230 size_t* pLostAllocationCount);
2329 const VkMemoryRequirements* pVkMemoryRequirements,
2359 const VkMemoryRequirements* pVkMemoryRequirements,
2361 size_t allocationCount,
2408 size_t allocationCount,
2642 size_t allocationCount,
2643 VkBool32* pAllocationsChanged,
2709 const VkBufferCreateInfo* pBufferCreateInfo,
2734 const VkImageCreateInfo* pImageCreateInfo,
2760 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2763 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2764 #define VMA_IMPLEMENTATION 2767 #ifdef VMA_IMPLEMENTATION 2768 #undef VMA_IMPLEMENTATION 2790 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2791 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2803 #if VMA_USE_STL_CONTAINERS 2804 #define VMA_USE_STL_VECTOR 1 2805 #define VMA_USE_STL_UNORDERED_MAP 1 2806 #define VMA_USE_STL_LIST 1 2809 #if VMA_USE_STL_VECTOR 2813 #if VMA_USE_STL_UNORDERED_MAP 2814 #include <unordered_map> 2817 #if VMA_USE_STL_LIST 2826 #include <algorithm> 2832 #define VMA_NULL nullptr 2835 #if defined(__APPLE__) || defined(__ANDROID__) 2837 void *aligned_alloc(
size_t alignment,
size_t size)
2840 if(alignment <
sizeof(
void*))
2842 alignment =
sizeof(
void*);
2846 if(posix_memalign(&pointer, alignment, size) == 0)
2860 #define VMA_ASSERT(expr) assert(expr) 2862 #define VMA_ASSERT(expr) 2868 #ifndef VMA_HEAVY_ASSERT 2870 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2872 #define VMA_HEAVY_ASSERT(expr) 2876 #ifndef VMA_ALIGN_OF 2877 #define VMA_ALIGN_OF(type) (__alignof(type)) 2880 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2882 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2884 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2888 #ifndef VMA_SYSTEM_FREE 2890 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2892 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2897 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2901 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2905 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2909 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2912 #ifndef VMA_DEBUG_LOG 2913 #define VMA_DEBUG_LOG(format, ...) 2923 #if VMA_STATS_STRING_ENABLED 2924 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2926 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2928 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2930 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2932 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
2934 snprintf(outStr, strLen,
"%p", ptr);
2944 void Lock() { m_Mutex.lock(); }
2945 void Unlock() { m_Mutex.unlock(); }
2949 #define VMA_MUTEX VmaMutex 2960 #ifndef VMA_ATOMIC_UINT32 2961 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2964 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2969 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2972 #ifndef VMA_DEBUG_ALIGNMENT 2977 #define VMA_DEBUG_ALIGNMENT (1) 2980 #ifndef VMA_DEBUG_MARGIN 2985 #define VMA_DEBUG_MARGIN (0) 2988 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2993 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2996 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3002 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3005 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3010 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3013 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3018 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3021 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3022 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3026 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3027 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3031 #ifndef VMA_CLASS_NO_COPY 3032 #define VMA_CLASS_NO_COPY(className) \ 3034 className(const className&) = delete; \ 3035 className& operator=(const className&) = delete; 3038 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3041 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3043 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3044 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3050 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3051 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Kernighan's method: each iteration clears the lowest set bit,
    // so the loop runs once per set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds `val` up to the nearest multiple of `align`.
// NOTE(review): assumes align > 0; val + align - 1 must not overflow T — same
// precondition as the original formula.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    // a/b*b == a - a%b by the definition of integer division, so this is
    // arithmetically identical to (val + align - 1) / align * align.
    const T bumped = val + align - 1;
    return bumped - (bumped % align);
}
// Rounds `val` down to the nearest multiple of `align`.
// NOTE(review): assumes align > 0, matching the original's division.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    // Equivalent to val / align * align: strip the remainder.
    const T misalignment = val % align;
    return val - misalignment;
}
// Integer division of x by y with rounding to nearest (half rounds up for
// unsigned operands). NOTE(review): assumes y > 0.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true when `x` is a power of two.
// Note: x == 0 also reports true — identical to the classic
// (x & (x-1)) == 0 bit trick this reimplements.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowestBitCleared = x & (x - 1);
    return lowestBitCleared == 0;
}
3098 static inline uint32_t VmaNextPow2(uint32_t v)
3109 static inline uint64_t VmaNextPow2(uint64_t v)
3123 static inline uint32_t VmaPrevPow2(uint32_t v)
3133 static inline uint64_t VmaPrevPow2(uint64_t v)
3145 static inline bool VmaStrIsEmpty(
const char* pStr)
3147 return pStr == VMA_NULL || *pStr ==
'\0';
3150 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: uses the last element
// (*centerValue) as the pivot, moves every element for which
// cmp(elem, pivot) is true in front of it, then places the pivot at its
// final sorted position and returns an iterator to it.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue; // pivot = last element
    Iterator insertIndex = beg; // next slot for an element < pivot
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Move the pivot into place between the two partitions.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
// Recursive quicksort over [beg, end) using VmaQuickSortPartition.
// Used as the default implementation behind the VMA_SORT macro.
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end) // empty/single-element ranges are already sorted
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
3202 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3204 #endif // #ifndef VMA_SORT 3213 static inline bool VmaBlocksOnSamePage(
3214 VkDeviceSize resourceAOffset,
3215 VkDeviceSize resourceASize,
3216 VkDeviceSize resourceBOffset,
3217 VkDeviceSize pageSize)
3219 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3220 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3221 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3222 VkDeviceSize resourceBStart = resourceBOffset;
3223 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3224 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation. The ordering of the values
// matters: VmaIsBufferImageGranularityConflict compares/normalizes by it.
enum VmaSuballocationType
{
    // Unused region of a memory block.
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    // Occupied, but the resource kind is not known.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    // An image whose tiling is not known.
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces the enum's underlying type to be at least 32 bits wide.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if placing suballocations of the two given types next to each
// other on the same memory page could violate bufferImageGranularity rules,
// meaning they must be separated / page-aligned.
// The pair is first normalized so suballocType1 <= suballocType2, halving the
// number of cases in the switch.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        // Free space conflicts with nothing.
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        // Unknown resource kind: conservatively treat as conflicting.
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        // Optimal next to optimal is fine.
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
3279 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3281 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3282 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3283 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3285 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3289 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3291 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3292 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3293 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3295 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3306 VMA_CLASS_NO_COPY(VmaMutexLock)
3308 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3309 m_pMutex(useMutex ? &mutex : VMA_NULL)
3326 VMA_MUTEX* m_pMutex;
3329 #if VMA_DEBUG_GLOBAL_MUTEX 3330 static VMA_MUTEX gDebugGlobalMutex;
3331 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3333 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3337 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the
// first element for which cmp(element, key) is false, or `end` if none —
// same contract as std::lower_bound.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t lo = 0;
    size_t hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1; // element < key: answer lies strictly after mid
        }
        else
        {
            hi = mid; // element >= key: mid is a candidate
        }
    }
    return beg + lo;
}
3370 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3372 if((pAllocationCallbacks != VMA_NULL) &&
3373 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3375 return (*pAllocationCallbacks->pfnAllocation)(
3376 pAllocationCallbacks->pUserData,
3379 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3383 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3387 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3389 if((pAllocationCallbacks != VMA_NULL) &&
3390 (pAllocationCallbacks->pfnFree != VMA_NULL))
3392 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3396 VMA_SYSTEM_FREE(ptr);
3400 template<
typename T>
3401 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3403 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3406 template<
typename T>
3407 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3409 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3412 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3414 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3416 template<
typename T>
3417 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3420 VmaFree(pAllocationCallbacks, ptr);
3423 template<
typename T>
3424 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3428 for(
size_t i = count; i--; )
3432 VmaFree(pAllocationCallbacks, ptr);
3437 template<
typename T>
3438 class VmaStlAllocator
3441 const VkAllocationCallbacks*
const m_pCallbacks;
3442 typedef T value_type;
3444 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3445 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3447 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3448 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3450 template<
typename U>
3451 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3453 return m_pCallbacks == rhs.m_pCallbacks;
3455 template<
typename U>
3456 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3458 return m_pCallbacks != rhs.m_pCallbacks;
3461 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3464 #if VMA_USE_STL_VECTOR 3466 #define VmaVector std::vector 3468 template<
typename T,
typename allocatorT>
3469 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3471 vec.insert(vec.begin() + index, item);
3474 template<
typename T,
typename allocatorT>
3475 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3477 vec.erase(vec.begin() + index);
3480 #else // #if VMA_USE_STL_VECTOR 3485 template<
typename T,
typename AllocatorT>
3489 typedef T value_type;
3491 VmaVector(
const AllocatorT& allocator) :
3492 m_Allocator(allocator),
3499 VmaVector(
size_t count,
const AllocatorT& allocator) :
3500 m_Allocator(allocator),
3501 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3507 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3508 m_Allocator(src.m_Allocator),
3509 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3510 m_Count(src.m_Count),
3511 m_Capacity(src.m_Count)
3515 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3521 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3524 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3528 resize(rhs.m_Count);
3531 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3537 bool empty()
const {
return m_Count == 0; }
3538 size_t size()
const {
return m_Count; }
3539 T* data() {
return m_pArray; }
3540 const T* data()
const {
return m_pArray; }
3542 T& operator[](
size_t index)
3544 VMA_HEAVY_ASSERT(index < m_Count);
3545 return m_pArray[index];
3547 const T& operator[](
size_t index)
const 3549 VMA_HEAVY_ASSERT(index < m_Count);
3550 return m_pArray[index];
3555 VMA_HEAVY_ASSERT(m_Count > 0);
3558 const T& front()
const 3560 VMA_HEAVY_ASSERT(m_Count > 0);
3565 VMA_HEAVY_ASSERT(m_Count > 0);
3566 return m_pArray[m_Count - 1];
3568 const T& back()
const 3570 VMA_HEAVY_ASSERT(m_Count > 0);
3571 return m_pArray[m_Count - 1];
3574 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3576 newCapacity = VMA_MAX(newCapacity, m_Count);
3578 if((newCapacity < m_Capacity) && !freeMemory)
3580 newCapacity = m_Capacity;
3583 if(newCapacity != m_Capacity)
3585 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3588 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3590 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3591 m_Capacity = newCapacity;
3592 m_pArray = newArray;
3596 void resize(
size_t newCount,
bool freeMemory =
false)
3598 size_t newCapacity = m_Capacity;
3599 if(newCount > m_Capacity)
3601 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3605 newCapacity = newCount;
3608 if(newCapacity != m_Capacity)
3610 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3611 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3612 if(elementsToCopy != 0)
3614 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3616 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3617 m_Capacity = newCapacity;
3618 m_pArray = newArray;
3624 void clear(
bool freeMemory =
false)
3626 resize(0, freeMemory);
3629 void insert(
size_t index,
const T& src)
3631 VMA_HEAVY_ASSERT(index <= m_Count);
3632 const size_t oldCount = size();
3633 resize(oldCount + 1);
3634 if(index < oldCount)
3636 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3638 m_pArray[index] = src;
3641 void remove(
size_t index)
3643 VMA_HEAVY_ASSERT(index < m_Count);
3644 const size_t oldCount = size();
3645 if(index < oldCount - 1)
3647 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3649 resize(oldCount - 1);
3652 void push_back(
const T& src)
3654 const size_t newIndex = size();
3655 resize(newIndex + 1);
3656 m_pArray[newIndex] = src;
3661 VMA_HEAVY_ASSERT(m_Count > 0);
3665 void push_front(
const T& src)
3672 VMA_HEAVY_ASSERT(m_Count > 0);
3676 typedef T* iterator;
3678 iterator begin() {
return m_pArray; }
3679 iterator end() {
return m_pArray + m_Count; }
3682 AllocatorT m_Allocator;
3688 template<
typename T,
typename allocatorT>
3689 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3691 vec.insert(index, item);
3694 template<
typename T,
typename allocatorT>
3695 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3700 #endif // #if VMA_USE_STL_VECTOR 3702 template<
typename CmpLess,
typename VectorT>
3703 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3705 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3707 vector.data() + vector.size(),
3709 CmpLess()) - vector.data();
3710 VmaVectorInsert(vector, indexToInsert, value);
3711 return indexToInsert;
3714 template<
typename CmpLess,
typename VectorT>
3715 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3718 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3723 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3725 size_t indexToRemove = it - vector.begin();
3726 VmaVectorRemove(vector, indexToRemove);
3732 template<
typename CmpLess,
typename IterT,
typename KeyT>
3733 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3736 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3737 beg, end, value, comparator);
3739 (!comparator(*it, value) && !comparator(value, *it)))
3754 template<
typename T>
3755 class VmaPoolAllocator
3757 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3759 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3760 ~VmaPoolAllocator();
3768 uint32_t NextFreeIndex;
3775 uint32_t FirstFreeIndex;
3778 const VkAllocationCallbacks* m_pAllocationCallbacks;
3779 size_t m_ItemsPerBlock;
3780 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3782 ItemBlock& CreateNewBlock();
3785 template<
typename T>
3786 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3787 m_pAllocationCallbacks(pAllocationCallbacks),
3788 m_ItemsPerBlock(itemsPerBlock),
3789 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3791 VMA_ASSERT(itemsPerBlock > 0);
3794 template<
typename T>
3795 VmaPoolAllocator<T>::~VmaPoolAllocator()
3800 template<
typename T>
3801 void VmaPoolAllocator<T>::Clear()
3803 for(
size_t i = m_ItemBlocks.size(); i--; )
3804 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3805 m_ItemBlocks.clear();
3808 template<
typename T>
3809 T* VmaPoolAllocator<T>::Alloc()
3811 for(
size_t i = m_ItemBlocks.size(); i--; )
3813 ItemBlock& block = m_ItemBlocks[i];
3815 if(block.FirstFreeIndex != UINT32_MAX)
3817 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3818 block.FirstFreeIndex = pItem->NextFreeIndex;
3819 return &pItem->Value;
3824 ItemBlock& newBlock = CreateNewBlock();
3825 Item*
const pItem = &newBlock.pItems[0];
3826 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3827 return &pItem->Value;
3830 template<
typename T>
3831 void VmaPoolAllocator<T>::Free(T* ptr)
3834 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3836 ItemBlock& block = m_ItemBlocks[i];
3840 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3843 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3845 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3846 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3847 block.FirstFreeIndex = index;
3851 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3854 template<
typename T>
3855 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3857 ItemBlock newBlock = {
3858 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3860 m_ItemBlocks.push_back(newBlock);
3863 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3864 newBlock.pItems[i].NextFreeIndex = i + 1;
3865 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3866 return m_ItemBlocks.back();
3872 #if VMA_USE_STL_LIST 3874 #define VmaList std::list 3876 #else // #if VMA_USE_STL_LIST 3878 template<
typename T>
3887 template<
typename T>
3890 VMA_CLASS_NO_COPY(VmaRawList)
3892 typedef VmaListItem<T> ItemType;
3894 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3898 size_t GetCount()
const {
return m_Count; }
3899 bool IsEmpty()
const {
return m_Count == 0; }
3901 ItemType* Front() {
return m_pFront; }
3902 const ItemType* Front()
const {
return m_pFront; }
3903 ItemType* Back() {
return m_pBack; }
3904 const ItemType* Back()
const {
return m_pBack; }
3906 ItemType* PushBack();
3907 ItemType* PushFront();
3908 ItemType* PushBack(
const T& value);
3909 ItemType* PushFront(
const T& value);
3914 ItemType* InsertBefore(ItemType* pItem);
3916 ItemType* InsertAfter(ItemType* pItem);
3918 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3919 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3921 void Remove(ItemType* pItem);
3924 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3925 VmaPoolAllocator<ItemType> m_ItemAllocator;
3931 template<
typename T>
3932 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3933 m_pAllocationCallbacks(pAllocationCallbacks),
3934 m_ItemAllocator(pAllocationCallbacks, 128),
3941 template<
typename T>
3942 VmaRawList<T>::~VmaRawList()
3948 template<
typename T>
3949 void VmaRawList<T>::Clear()
3951 if(IsEmpty() ==
false)
3953 ItemType* pItem = m_pBack;
3954 while(pItem != VMA_NULL)
3956 ItemType*
const pPrevItem = pItem->pPrev;
3957 m_ItemAllocator.Free(pItem);
3960 m_pFront = VMA_NULL;
3966 template<
typename T>
3967 VmaListItem<T>* VmaRawList<T>::PushBack()
3969 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3970 pNewItem->pNext = VMA_NULL;
3973 pNewItem->pPrev = VMA_NULL;
3974 m_pFront = pNewItem;
3980 pNewItem->pPrev = m_pBack;
3981 m_pBack->pNext = pNewItem;
3988 template<
typename T>
3989 VmaListItem<T>* VmaRawList<T>::PushFront()
3991 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3992 pNewItem->pPrev = VMA_NULL;
3995 pNewItem->pNext = VMA_NULL;
3996 m_pFront = pNewItem;
4002 pNewItem->pNext = m_pFront;
4003 m_pFront->pPrev = pNewItem;
4004 m_pFront = pNewItem;
4010 template<
typename T>
4011 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4013 ItemType*
const pNewItem = PushBack();
4014 pNewItem->Value = value;
4018 template<
typename T>
4019 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4021 ItemType*
const pNewItem = PushFront();
4022 pNewItem->Value = value;
4026 template<
typename T>
4027 void VmaRawList<T>::PopBack()
4029 VMA_HEAVY_ASSERT(m_Count > 0);
4030 ItemType*
const pBackItem = m_pBack;
4031 ItemType*
const pPrevItem = pBackItem->pPrev;
4032 if(pPrevItem != VMA_NULL)
4034 pPrevItem->pNext = VMA_NULL;
4036 m_pBack = pPrevItem;
4037 m_ItemAllocator.Free(pBackItem);
4041 template<
typename T>
4042 void VmaRawList<T>::PopFront()
4044 VMA_HEAVY_ASSERT(m_Count > 0);
4045 ItemType*
const pFrontItem = m_pFront;
4046 ItemType*
const pNextItem = pFrontItem->pNext;
4047 if(pNextItem != VMA_NULL)
4049 pNextItem->pPrev = VMA_NULL;
4051 m_pFront = pNextItem;
4052 m_ItemAllocator.Free(pFrontItem);
4056 template<
typename T>
4057 void VmaRawList<T>::Remove(ItemType* pItem)
4059 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4060 VMA_HEAVY_ASSERT(m_Count > 0);
4062 if(pItem->pPrev != VMA_NULL)
4064 pItem->pPrev->pNext = pItem->pNext;
4068 VMA_HEAVY_ASSERT(m_pFront == pItem);
4069 m_pFront = pItem->pNext;
4072 if(pItem->pNext != VMA_NULL)
4074 pItem->pNext->pPrev = pItem->pPrev;
4078 VMA_HEAVY_ASSERT(m_pBack == pItem);
4079 m_pBack = pItem->pPrev;
4082 m_ItemAllocator.Free(pItem);
4086 template<
typename T>
4087 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4089 if(pItem != VMA_NULL)
4091 ItemType*
const prevItem = pItem->pPrev;
4092 ItemType*
const newItem = m_ItemAllocator.Alloc();
4093 newItem->pPrev = prevItem;
4094 newItem->pNext = pItem;
4095 pItem->pPrev = newItem;
4096 if(prevItem != VMA_NULL)
4098 prevItem->pNext = newItem;
4102 VMA_HEAVY_ASSERT(m_pFront == pItem);
4112 template<
typename T>
4113 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4115 if(pItem != VMA_NULL)
4117 ItemType*
const nextItem = pItem->pNext;
4118 ItemType*
const newItem = m_ItemAllocator.Alloc();
4119 newItem->pNext = nextItem;
4120 newItem->pPrev = pItem;
4121 pItem->pNext = newItem;
4122 if(nextItem != VMA_NULL)
4124 nextItem->pPrev = newItem;
4128 VMA_HEAVY_ASSERT(m_pBack == pItem);
4138 template<
typename T>
4139 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4141 ItemType*
const newItem = InsertBefore(pItem);
4142 newItem->Value = value;
4146 template<
typename T>
4147 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4149 ItemType*
const newItem = InsertAfter(pItem);
4150 newItem->Value = value;
4154 template<
typename T,
typename AllocatorT>
4157 VMA_CLASS_NO_COPY(VmaList)
4168 T& operator*()
const 4170 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4171 return m_pItem->Value;
4173 T* operator->()
const 4175 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4176 return &m_pItem->Value;
4179 iterator& operator++()
4181 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4182 m_pItem = m_pItem->pNext;
4185 iterator& operator--()
4187 if(m_pItem != VMA_NULL)
4189 m_pItem = m_pItem->pPrev;
4193 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4194 m_pItem = m_pList->Back();
4199 iterator operator++(
int)
4201 iterator result = *
this;
4205 iterator operator--(
int)
4207 iterator result = *
this;
4212 bool operator==(
const iterator& rhs)
const 4214 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4215 return m_pItem == rhs.m_pItem;
4217 bool operator!=(
const iterator& rhs)
const 4219 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4220 return m_pItem != rhs.m_pItem;
4224 VmaRawList<T>* m_pList;
4225 VmaListItem<T>* m_pItem;
4227 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4233 friend class VmaList<T, AllocatorT>;
4236 class const_iterator
4245 const_iterator(
const iterator& src) :
4246 m_pList(src.m_pList),
4247 m_pItem(src.m_pItem)
4251 const T& operator*()
const 4253 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4254 return m_pItem->Value;
4256 const T* operator->()
const 4258 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4259 return &m_pItem->Value;
4262 const_iterator& operator++()
4264 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4265 m_pItem = m_pItem->pNext;
4268 const_iterator& operator--()
4270 if(m_pItem != VMA_NULL)
4272 m_pItem = m_pItem->pPrev;
4276 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4277 m_pItem = m_pList->Back();
4282 const_iterator operator++(
int)
4284 const_iterator result = *
this;
4288 const_iterator operator--(
int)
4290 const_iterator result = *
this;
4295 bool operator==(
const const_iterator& rhs)
const 4297 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4298 return m_pItem == rhs.m_pItem;
4300 bool operator!=(
const const_iterator& rhs)
const 4302 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4303 return m_pItem != rhs.m_pItem;
4307 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4313 const VmaRawList<T>* m_pList;
4314 const VmaListItem<T>* m_pItem;
4316 friend class VmaList<T, AllocatorT>;
4319 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4321 bool empty()
const {
return m_RawList.IsEmpty(); }
4322 size_t size()
const {
return m_RawList.GetCount(); }
4324 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4325 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4327 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4328 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4330 void clear() { m_RawList.Clear(); }
4331 void push_back(
const T& value) { m_RawList.PushBack(value); }
4332 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4333 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4336 VmaRawList<T> m_RawList;
4339 #endif // #if VMA_USE_STL_LIST 4347 #if VMA_USE_STL_UNORDERED_MAP 4349 #define VmaPair std::pair 4351 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4352 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4354 #else // #if VMA_USE_STL_UNORDERED_MAP 4356 template<
typename T1,
typename T2>
4362 VmaPair() : first(), second() { }
4363 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4369 template<
typename KeyT,
typename ValueT>
4373 typedef VmaPair<KeyT, ValueT> PairType;
4374 typedef PairType* iterator;
4376 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4378 iterator begin() {
return m_Vector.begin(); }
4379 iterator end() {
return m_Vector.end(); }
4381 void insert(
const PairType& pair);
4382 iterator find(
const KeyT& key);
4383 void erase(iterator it);
4386 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4389 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4391 template<
typename FirstT,
typename SecondT>
4392 struct VmaPairFirstLess
4394 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4396 return lhs.first < rhs.first;
4398 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4400 return lhs.first < rhsFirst;
4404 template<
typename KeyT,
typename ValueT>
4405 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4407 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4409 m_Vector.data() + m_Vector.size(),
4411 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4412 VmaVectorInsert(m_Vector, indexToInsert, pair);
4415 template<
typename KeyT,
typename ValueT>
4416 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4418 PairType* it = VmaBinaryFindFirstNotLess(
4420 m_Vector.data() + m_Vector.size(),
4422 VmaPairFirstLess<KeyT, ValueT>());
4423 if((it != m_Vector.end()) && (it->first == key))
4429 return m_Vector.end();
4433 template<
typename KeyT,
typename ValueT>
4434 void VmaMap<KeyT, ValueT>::erase(iterator it)
4436 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4439 #endif // #if VMA_USE_STL_UNORDERED_MAP 4445 class VmaDeviceMemoryBlock;
4447 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4449 struct VmaAllocation_T
4451 VMA_CLASS_NO_COPY(VmaAllocation_T)
4453 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4457 FLAG_USER_DATA_STRING = 0x01,
4461 enum ALLOCATION_TYPE
4463 ALLOCATION_TYPE_NONE,
4464 ALLOCATION_TYPE_BLOCK,
4465 ALLOCATION_TYPE_DEDICATED,
4468 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4471 m_pUserData(VMA_NULL),
4472 m_LastUseFrameIndex(currentFrameIndex),
4473 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4474 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4476 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4478 #if VMA_STATS_STRING_ENABLED 4479 m_CreationFrameIndex = currentFrameIndex;
4480 m_BufferImageUsage = 0;
4486 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4489 VMA_ASSERT(m_pUserData == VMA_NULL);
4492 void InitBlockAllocation(
4494 VmaDeviceMemoryBlock* block,
4495 VkDeviceSize offset,
4496 VkDeviceSize alignment,
4498 VmaSuballocationType suballocationType,
4502 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4503 VMA_ASSERT(block != VMA_NULL);
4504 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4505 m_Alignment = alignment;
4507 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4508 m_SuballocationType = (uint8_t)suballocationType;
4509 m_BlockAllocation.m_hPool = hPool;
4510 m_BlockAllocation.m_Block = block;
4511 m_BlockAllocation.m_Offset = offset;
4512 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4517 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4518 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4519 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4520 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4521 m_BlockAllocation.m_Block = VMA_NULL;
4522 m_BlockAllocation.m_Offset = 0;
4523 m_BlockAllocation.m_CanBecomeLost =
true;
4526 void ChangeBlockAllocation(
4528 VmaDeviceMemoryBlock* block,
4529 VkDeviceSize offset);
4532 void InitDedicatedAllocation(
4533 uint32_t memoryTypeIndex,
4534 VkDeviceMemory hMemory,
4535 VmaSuballocationType suballocationType,
4539 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4540 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4541 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4544 m_SuballocationType = (uint8_t)suballocationType;
4545 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4546 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4547 m_DedicatedAllocation.m_hMemory = hMemory;
4548 m_DedicatedAllocation.m_pMappedData = pMappedData;
4551 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4552 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4553 VkDeviceSize GetSize()
const {
return m_Size; }
4554 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4555 void* GetUserData()
const {
return m_pUserData; }
4556 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4557 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4559 VmaDeviceMemoryBlock* GetBlock()
const 4561 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4562 return m_BlockAllocation.m_Block;
4564 VkDeviceSize GetOffset()
const;
4565 VkDeviceMemory GetMemory()
const;
4566 uint32_t GetMemoryTypeIndex()
const;
4567 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4568 void* GetMappedData()
const;
4569 bool CanBecomeLost()
const;
4572 uint32_t GetLastUseFrameIndex()
const 4574 return m_LastUseFrameIndex.load();
4576 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4578 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4588 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4590 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4592 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4603 void BlockAllocMap();
4604 void BlockAllocUnmap();
4605 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4608 #if VMA_STATS_STRING_ENABLED 4609 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4610 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4612 void InitBufferImageUsage(uint32_t bufferImageUsage)
4614 VMA_ASSERT(m_BufferImageUsage == 0);
4615 m_BufferImageUsage = bufferImageUsage;
4618 void PrintParameters(
class VmaJsonWriter& json)
const;
4622 VkDeviceSize m_Alignment;
4623 VkDeviceSize m_Size;
4625 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4627 uint8_t m_SuballocationType;
4634 struct BlockAllocation
4637 VmaDeviceMemoryBlock* m_Block;
4638 VkDeviceSize m_Offset;
4639 bool m_CanBecomeLost;
4643 struct DedicatedAllocation
4645 uint32_t m_MemoryTypeIndex;
4646 VkDeviceMemory m_hMemory;
4647 void* m_pMappedData;
4653 BlockAllocation m_BlockAllocation;
4655 DedicatedAllocation m_DedicatedAllocation;
4658 #if VMA_STATS_STRING_ENABLED 4659 uint32_t m_CreationFrameIndex;
4660 uint32_t m_BufferImageUsage;
4670 struct VmaSuballocation
4672 VkDeviceSize offset;
4675 VmaSuballocationType type;
4679 struct VmaSuballocationOffsetLess
4681 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4683 return lhs.offset < rhs.offset;
4686 struct VmaSuballocationOffsetGreater
4688 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4690 return lhs.offset > rhs.offset;
4694 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4697 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4712 struct VmaAllocationRequest
4714 VkDeviceSize offset;
4715 VkDeviceSize sumFreeSize;
4716 VkDeviceSize sumItemSize;
4717 VmaSuballocationList::iterator item;
4718 size_t itemsToMakeLostCount;
4721 VkDeviceSize CalcCost()
const 4723 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4731 class VmaBlockMetadata
4735 virtual ~VmaBlockMetadata() { }
4736 virtual void Init(VkDeviceSize size) { m_Size = size; }
4739 virtual bool Validate()
const = 0;
4740 VkDeviceSize GetSize()
const {
return m_Size; }
4741 virtual size_t GetAllocationCount()
const = 0;
4742 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4743 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4745 virtual bool IsEmpty()
const = 0;
4747 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4749 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4751 #if VMA_STATS_STRING_ENABLED 4752 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4758 virtual bool CreateAllocationRequest(
4759 uint32_t currentFrameIndex,
4760 uint32_t frameInUseCount,
4761 VkDeviceSize bufferImageGranularity,
4762 VkDeviceSize allocSize,
4763 VkDeviceSize allocAlignment,
4765 VmaSuballocationType allocType,
4766 bool canMakeOtherLost,
4768 VmaAllocationRequest* pAllocationRequest) = 0;
4770 virtual bool MakeRequestedAllocationsLost(
4771 uint32_t currentFrameIndex,
4772 uint32_t frameInUseCount,
4773 VmaAllocationRequest* pAllocationRequest) = 0;
4775 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4777 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4781 const VmaAllocationRequest& request,
4782 VmaSuballocationType type,
4783 VkDeviceSize allocSize,
4789 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4792 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4794 #if VMA_STATS_STRING_ENABLED 4795 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4796 VkDeviceSize unusedBytes,
4797 size_t allocationCount,
4798 size_t unusedRangeCount)
const;
4799 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4800 VkDeviceSize offset,
4802 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4803 VkDeviceSize offset,
4804 VkDeviceSize size)
const;
4805 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4809 VkDeviceSize m_Size;
4810 const VkAllocationCallbacks* m_pAllocationCallbacks;
4813 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4814 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4818 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4820 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4823 virtual ~VmaBlockMetadata_Generic();
4824 virtual void Init(VkDeviceSize size);
4826 virtual bool Validate()
const;
4827 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4828 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4829 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4830 virtual bool IsEmpty()
const;
4832 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4833 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4835 #if VMA_STATS_STRING_ENABLED 4836 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4839 virtual bool CreateAllocationRequest(
4840 uint32_t currentFrameIndex,
4841 uint32_t frameInUseCount,
4842 VkDeviceSize bufferImageGranularity,
4843 VkDeviceSize allocSize,
4844 VkDeviceSize allocAlignment,
4846 VmaSuballocationType allocType,
4847 bool canMakeOtherLost,
4849 VmaAllocationRequest* pAllocationRequest);
4851 virtual bool MakeRequestedAllocationsLost(
4852 uint32_t currentFrameIndex,
4853 uint32_t frameInUseCount,
4854 VmaAllocationRequest* pAllocationRequest);
4856 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4858 virtual VkResult CheckCorruption(
const void* pBlockData);
4861 const VmaAllocationRequest& request,
4862 VmaSuballocationType type,
4863 VkDeviceSize allocSize,
4868 virtual void FreeAtOffset(VkDeviceSize offset);
4871 uint32_t m_FreeCount;
4872 VkDeviceSize m_SumFreeSize;
4873 VmaSuballocationList m_Suballocations;
4876 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4878 bool ValidateFreeSuballocationList()
const;
4882 bool CheckAllocation(
4883 uint32_t currentFrameIndex,
4884 uint32_t frameInUseCount,
4885 VkDeviceSize bufferImageGranularity,
4886 VkDeviceSize allocSize,
4887 VkDeviceSize allocAlignment,
4888 VmaSuballocationType allocType,
4889 VmaSuballocationList::const_iterator suballocItem,
4890 bool canMakeOtherLost,
4891 VkDeviceSize* pOffset,
4892 size_t* itemsToMakeLostCount,
4893 VkDeviceSize* pSumFreeSize,
4894 VkDeviceSize* pSumItemSize)
const;
4896 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4900 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4903 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4906 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4987 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4989 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4992 virtual ~VmaBlockMetadata_Linear();
4993 virtual void Init(VkDeviceSize size);
4995 virtual bool Validate()
const;
4996 virtual size_t GetAllocationCount()
const;
4997 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4998 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4999 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5001 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5002 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5004 #if VMA_STATS_STRING_ENABLED 5005 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5008 virtual bool CreateAllocationRequest(
5009 uint32_t currentFrameIndex,
5010 uint32_t frameInUseCount,
5011 VkDeviceSize bufferImageGranularity,
5012 VkDeviceSize allocSize,
5013 VkDeviceSize allocAlignment,
5015 VmaSuballocationType allocType,
5016 bool canMakeOtherLost,
5018 VmaAllocationRequest* pAllocationRequest);
5020 virtual bool MakeRequestedAllocationsLost(
5021 uint32_t currentFrameIndex,
5022 uint32_t frameInUseCount,
5023 VmaAllocationRequest* pAllocationRequest);
5025 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5027 virtual VkResult CheckCorruption(
const void* pBlockData);
5030 const VmaAllocationRequest& request,
5031 VmaSuballocationType type,
5032 VkDeviceSize allocSize,
5037 virtual void FreeAtOffset(VkDeviceSize offset);
5047 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5049 enum SECOND_VECTOR_MODE
5051 SECOND_VECTOR_EMPTY,
5056 SECOND_VECTOR_RING_BUFFER,
5062 SECOND_VECTOR_DOUBLE_STACK,
5065 VkDeviceSize m_SumFreeSize;
5066 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5067 uint32_t m_1stVectorIndex;
5068 SECOND_VECTOR_MODE m_2ndVectorMode;
5070 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5071 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5072 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5073 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5076 size_t m_1stNullItemsBeginCount;
5078 size_t m_1stNullItemsMiddleCount;
5080 size_t m_2ndNullItemsCount;
5082 bool ShouldCompact1st()
const;
5083 void CleanupAfterFree();
5097 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5099 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5102 virtual ~VmaBlockMetadata_Buddy();
5103 virtual void Init(VkDeviceSize size);
5105 virtual bool Validate()
const;
5106 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5107 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5108 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5109 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5111 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5112 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5114 #if VMA_STATS_STRING_ENABLED 5115 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5118 virtual bool CreateAllocationRequest(
5119 uint32_t currentFrameIndex,
5120 uint32_t frameInUseCount,
5121 VkDeviceSize bufferImageGranularity,
5122 VkDeviceSize allocSize,
5123 VkDeviceSize allocAlignment,
5125 VmaSuballocationType allocType,
5126 bool canMakeOtherLost,
5128 VmaAllocationRequest* pAllocationRequest);
5130 virtual bool MakeRequestedAllocationsLost(
5131 uint32_t currentFrameIndex,
5132 uint32_t frameInUseCount,
5133 VmaAllocationRequest* pAllocationRequest);
5135 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5137 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5140 const VmaAllocationRequest& request,
5141 VmaSuballocationType type,
5142 VkDeviceSize allocSize,
5146 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5147 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5150 static const VkDeviceSize MIN_NODE_SIZE = 32;
5151 static const size_t MAX_LEVELS = 30;
5153 struct ValidationContext
5155 size_t calculatedAllocationCount;
5156 size_t calculatedFreeCount;
5157 VkDeviceSize calculatedSumFreeSize;
5159 ValidationContext() :
5160 calculatedAllocationCount(0),
5161 calculatedFreeCount(0),
5162 calculatedSumFreeSize(0) { }
5167 VkDeviceSize offset;
5197 VkDeviceSize m_UsableSize;
5198 uint32_t m_LevelCount;
5204 } m_FreeList[MAX_LEVELS];
5206 size_t m_AllocationCount;
5210 VkDeviceSize m_SumFreeSize;
5212 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5213 void DeleteNode(Node* node);
5214 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5215 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5216 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5218 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5219 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5223 void AddToFreeListFront(uint32_t level, Node* node);
5227 void RemoveFromFreeList(uint32_t level, Node* node);
5229 #if VMA_STATS_STRING_ENABLED 5230 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5240 class VmaDeviceMemoryBlock
5242 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5244 VmaBlockMetadata* m_pMetadata;
5248 ~VmaDeviceMemoryBlock()
5250 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5251 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5257 uint32_t newMemoryTypeIndex,
5258 VkDeviceMemory newMemory,
5259 VkDeviceSize newSize,
5261 uint32_t algorithm);
5265 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5266 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5267 uint32_t GetId()
const {
return m_Id; }
5268 void* GetMappedData()
const {
return m_pMappedData; }
5271 bool Validate()
const;
5276 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5279 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5280 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5282 VkResult BindBufferMemory(
5286 VkResult BindImageMemory(
5292 uint32_t m_MemoryTypeIndex;
5294 VkDeviceMemory m_hMemory;
5299 uint32_t m_MapCount;
5300 void* m_pMappedData;
// Orders raw pointers by address, for use as a comparator in sorted
// pointer containers. NOTE(review): raw '<' on unrelated pointers is
// unspecified by the standard; in practice all major ABIs give a total
// order here.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
5319 struct VmaBlockVector
5321 VMA_CLASS_NO_COPY(VmaBlockVector)
5325 uint32_t memoryTypeIndex,
5326 VkDeviceSize preferredBlockSize,
5327 size_t minBlockCount,
5328 size_t maxBlockCount,
5329 VkDeviceSize bufferImageGranularity,
5330 uint32_t frameInUseCount,
5332 bool explicitBlockSize,
5333 uint32_t algorithm);
5336 VkResult CreateMinBlocks();
5338 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5339 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5340 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5341 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5342 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5346 bool IsEmpty()
const {
return m_Blocks.empty(); }
5347 bool IsCorruptionDetectionEnabled()
const;
5351 uint32_t currentFrameIndex,
5353 VkDeviceSize alignment,
5355 VmaSuballocationType suballocType,
5356 size_t allocationCount,
5365 #if VMA_STATS_STRING_ENABLED 5366 void PrintDetailedMap(
class VmaJsonWriter& json);
5369 void MakePoolAllocationsLost(
5370 uint32_t currentFrameIndex,
5371 size_t* pLostAllocationCount);
5372 VkResult CheckCorruption();
5374 VmaDefragmentator* EnsureDefragmentator(
5376 uint32_t currentFrameIndex);
5378 VkResult Defragment(
5380 VkDeviceSize& maxBytesToMove,
5381 uint32_t& maxAllocationsToMove);
5383 void DestroyDefragmentator();
5386 friend class VmaDefragmentator;
5389 const uint32_t m_MemoryTypeIndex;
5390 const VkDeviceSize m_PreferredBlockSize;
5391 const size_t m_MinBlockCount;
5392 const size_t m_MaxBlockCount;
5393 const VkDeviceSize m_BufferImageGranularity;
5394 const uint32_t m_FrameInUseCount;
5395 const bool m_IsCustomPool;
5396 const bool m_ExplicitBlockSize;
5397 const uint32_t m_Algorithm;
5398 bool m_HasEmptyBlock;
5401 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5405 VmaDefragmentator* m_pDefragmentator;
5406 uint32_t m_NextBlockId;
5408 VkDeviceSize CalcMaxBlockSize()
const;
5411 void Remove(VmaDeviceMemoryBlock* pBlock);
5415 void IncrementallySortBlocks();
5417 VkResult AllocatePage(
5419 uint32_t currentFrameIndex,
5421 VkDeviceSize alignment,
5423 VmaSuballocationType suballocType,
5427 VkResult AllocateFromBlock(
5428 VmaDeviceMemoryBlock* pBlock,
5430 uint32_t currentFrameIndex,
5432 VkDeviceSize alignment,
5435 VmaSuballocationType suballocType,
5439 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5444 VMA_CLASS_NO_COPY(VmaPool_T)
5446 VmaBlockVector m_BlockVector;
5451 VkDeviceSize preferredBlockSize);
5454 uint32_t GetId()
const {
return m_Id; }
5455 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5457 #if VMA_STATS_STRING_ENABLED 5465 class VmaDefragmentator
5467 VMA_CLASS_NO_COPY(VmaDefragmentator)
5470 VmaBlockVector*
const m_pBlockVector;
5471 uint32_t m_CurrentFrameIndex;
5472 VkDeviceSize m_BytesMoved;
5473 uint32_t m_AllocationsMoved;
5475 struct AllocationInfo
5478 VkBool32* m_pChanged;
5481 m_hAllocation(VK_NULL_HANDLE),
5482 m_pChanged(VMA_NULL)
5487 struct AllocationInfoSizeGreater
5489 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5491 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5496 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5500 VmaDeviceMemoryBlock* m_pBlock;
5501 bool m_HasNonMovableAllocations;
5502 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5504 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5506 m_HasNonMovableAllocations(true),
5507 m_Allocations(pAllocationCallbacks),
5508 m_pMappedDataForDefragmentation(VMA_NULL)
5512 void CalcHasNonMovableAllocations()
5514 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5515 const size_t defragmentAllocCount = m_Allocations.size();
5516 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5519 void SortAllocationsBySizeDescecnding()
5521 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5524 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5529 void* m_pMappedDataForDefragmentation;
5532 struct BlockPointerLess
5534 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5536 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5538 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5540 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5546 struct BlockInfoCompareMoveDestination
5548 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5550 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5554 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5558 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5566 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5567 BlockInfoVector m_Blocks;
5569 VkResult DefragmentRound(
5570 VkDeviceSize maxBytesToMove,
5571 uint32_t maxAllocationsToMove);
5573 static bool MoveMakesSense(
5574 size_t dstBlockIndex, VkDeviceSize dstOffset,
5575 size_t srcBlockIndex, VkDeviceSize srcOffset);
5580 VmaBlockVector* pBlockVector,
5581 uint32_t currentFrameIndex);
5583 ~VmaDefragmentator();
5585 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5586 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5588 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5590 VkResult Defragment(
5591 VkDeviceSize maxBytesToMove,
5592 uint32_t maxAllocationsToMove);
5595 #if VMA_RECORDING_ENABLED 5602 void WriteConfiguration(
5603 const VkPhysicalDeviceProperties& devProps,
5604 const VkPhysicalDeviceMemoryProperties& memProps,
5605 bool dedicatedAllocationExtensionEnabled);
5608 void RecordCreateAllocator(uint32_t frameIndex);
5609 void RecordDestroyAllocator(uint32_t frameIndex);
5610 void RecordCreatePool(uint32_t frameIndex,
5613 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5614 void RecordAllocateMemory(uint32_t frameIndex,
5615 const VkMemoryRequirements& vkMemReq,
5618 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5619 const VkMemoryRequirements& vkMemReq,
5620 bool requiresDedicatedAllocation,
5621 bool prefersDedicatedAllocation,
5624 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5625 const VkMemoryRequirements& vkMemReq,
5626 bool requiresDedicatedAllocation,
5627 bool prefersDedicatedAllocation,
5630 void RecordFreeMemory(uint32_t frameIndex,
5632 void RecordSetAllocationUserData(uint32_t frameIndex,
5634 const void* pUserData);
5635 void RecordCreateLostAllocation(uint32_t frameIndex,
5637 void RecordMapMemory(uint32_t frameIndex,
5639 void RecordUnmapMemory(uint32_t frameIndex,
5641 void RecordFlushAllocation(uint32_t frameIndex,
5642 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5643 void RecordInvalidateAllocation(uint32_t frameIndex,
5644 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5645 void RecordCreateBuffer(uint32_t frameIndex,
5646 const VkBufferCreateInfo& bufCreateInfo,
5649 void RecordCreateImage(uint32_t frameIndex,
5650 const VkImageCreateInfo& imageCreateInfo,
5653 void RecordDestroyBuffer(uint32_t frameIndex,
5655 void RecordDestroyImage(uint32_t frameIndex,
5657 void RecordTouchAllocation(uint32_t frameIndex,
5659 void RecordGetAllocationInfo(uint32_t frameIndex,
5661 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5671 class UserDataString
5675 const char* GetString()
const {
return m_Str; }
5685 VMA_MUTEX m_FileMutex;
5687 int64_t m_StartCounter;
5689 void GetBasicParams(CallParams& outParams);
5693 #endif // #if VMA_RECORDING_ENABLED 5696 struct VmaAllocator_T
5698 VMA_CLASS_NO_COPY(VmaAllocator_T)
5701 bool m_UseKhrDedicatedAllocation;
5703 bool m_AllocationCallbacksSpecified;
5704 VkAllocationCallbacks m_AllocationCallbacks;
5708 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5709 VMA_MUTEX m_HeapSizeLimitMutex;
5711 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5712 VkPhysicalDeviceMemoryProperties m_MemProps;
5715 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5718 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5719 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5720 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5726 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5728 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5732 return m_VulkanFunctions;
5735 VkDeviceSize GetBufferImageGranularity()
const 5738 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5739 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5742 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5743 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5745 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5747 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5748 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5751 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5753 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5754 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5757 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5759 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5760 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5761 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5764 bool IsIntegratedGpu()
const 5766 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5769 #if VMA_RECORDING_ENABLED 5770 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5773 void GetBufferMemoryRequirements(
5775 VkMemoryRequirements& memReq,
5776 bool& requiresDedicatedAllocation,
5777 bool& prefersDedicatedAllocation)
const;
5778 void GetImageMemoryRequirements(
5780 VkMemoryRequirements& memReq,
5781 bool& requiresDedicatedAllocation,
5782 bool& prefersDedicatedAllocation)
const;
5785 VkResult AllocateMemory(
5786 const VkMemoryRequirements& vkMemReq,
5787 bool requiresDedicatedAllocation,
5788 bool prefersDedicatedAllocation,
5789 VkBuffer dedicatedBuffer,
5790 VkImage dedicatedImage,
5792 VmaSuballocationType suballocType,
5793 size_t allocationCount,
5798 size_t allocationCount,
5801 void CalculateStats(
VmaStats* pStats);
5803 #if VMA_STATS_STRING_ENABLED 5804 void PrintDetailedMap(
class VmaJsonWriter& json);
5807 VkResult Defragment(
5809 size_t allocationCount,
5810 VkBool32* pAllocationsChanged,
5818 void DestroyPool(
VmaPool pool);
5821 void SetCurrentFrameIndex(uint32_t frameIndex);
5822 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5824 void MakePoolAllocationsLost(
5826 size_t* pLostAllocationCount);
5827 VkResult CheckPoolCorruption(
VmaPool hPool);
5828 VkResult CheckCorruption(uint32_t memoryTypeBits);
5832 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5833 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5838 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5839 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5841 void FlushOrInvalidateAllocation(
5843 VkDeviceSize offset, VkDeviceSize size,
5844 VMA_CACHE_OPERATION op);
5846 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5849 VkDeviceSize m_PreferredLargeHeapBlockSize;
5851 VkPhysicalDevice m_PhysicalDevice;
5852 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5854 VMA_MUTEX m_PoolsMutex;
5856 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5857 uint32_t m_NextPoolId;
5861 #if VMA_RECORDING_ENABLED 5862 VmaRecorder* m_pRecorder;
5867 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5869 VkResult AllocateMemoryOfType(
5871 VkDeviceSize alignment,
5872 bool dedicatedAllocation,
5873 VkBuffer dedicatedBuffer,
5874 VkImage dedicatedImage,
5876 uint32_t memTypeIndex,
5877 VmaSuballocationType suballocType,
5878 size_t allocationCount,
5882 VkResult AllocateDedicatedMemoryPage(
5884 VmaSuballocationType suballocType,
5885 uint32_t memTypeIndex,
5886 const VkMemoryAllocateInfo& allocInfo,
5888 bool isUserDataString,
5893 VkResult AllocateDedicatedMemory(
5895 VmaSuballocationType suballocType,
5896 uint32_t memTypeIndex,
5898 bool isUserDataString,
5900 VkBuffer dedicatedBuffer,
5901 VkImage dedicatedImage,
5902 size_t allocationCount,
5912 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5914 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5917 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5919 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5922 template<
typename T>
5925 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5928 template<
typename T>
5929 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5931 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5934 template<
typename T>
5935 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5940 VmaFree(hAllocator, ptr);
5944 template<
typename T>
5945 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5949 for(
size_t i = count; i--; )
5951 VmaFree(hAllocator, ptr);
5958 #if VMA_STATS_STRING_ENABLED 5960 class VmaStringBuilder
5963 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5964 size_t GetLength()
const {
return m_Data.size(); }
5965 const char* GetData()
const {
return m_Data.data(); }
5967 void Add(
char ch) { m_Data.push_back(ch); }
5968 void Add(
const char* pStr);
5969 void AddNewLine() { Add(
'\n'); }
5970 void AddNumber(uint32_t num);
5971 void AddNumber(uint64_t num);
5972 void AddPointer(
const void* ptr);
5975 VmaVector< char, VmaStlAllocator<char> > m_Data;
5978 void VmaStringBuilder::Add(
const char* pStr)
5980 const size_t strLen = strlen(pStr);
5983 const size_t oldCount = m_Data.size();
5984 m_Data.resize(oldCount + strLen);
5985 memcpy(m_Data.data() + oldCount, pStr, strLen);
// AddNumber/AddPointer: format the value into a small stack buffer via the
// Vma*ToStr helpers, then append it. NOTE(review): the `char buf[...]`
// declarations and the trailing `Add(buf);` calls are not visible in this
// extract — confirm upstream.
5989 void VmaStringBuilder::AddNumber(uint32_t num)
5992 VmaUint32ToStr(buf,
sizeof(buf), num);
5996 void VmaStringBuilder::AddNumber(uint64_t num)
5999 VmaUint64ToStr(buf,
sizeof(buf), num);
6003 void VmaStringBuilder::AddPointer(
const void* ptr)
6006 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: streaming JSON emitter over a VmaStringBuilder. Maintains a
// stack of open objects/arrays so commas, quoting and indentation are emitted
// correctly. NOTE(review): the class keyword, access specifiers, destructor
// and EndObject/EndArray/WriteNull declarations were dropped by extraction.
6010 #endif // #if VMA_STATS_STRING_ENABLED 6015 #if VMA_STATS_STRING_ENABLED 6019 VMA_CLASS_NO_COPY(VmaJsonWriter)
6021 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Begin/End pairs must balance; singleLine suppresses indentation inside.
6024 void BeginObject(
bool singleLine =
false);
6027 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
6030 void WriteString(
const char* pStr);
6031 void BeginString(
const char* pStr = VMA_NULL);
6032 void ContinueString(
const char* pStr);
6033 void ContinueString(uint32_t n);
6034 void ContinueString(uint64_t n);
6035 void ContinueString_Pointer(
const void* ptr);
6036 void EndString(
const char* pStr = VMA_NULL);
6038 void WriteNumber(uint32_t n);
6039 void WriteNumber(uint64_t n);
6040 void WriteBool(
bool b);
// Indentation unit used by WriteIndent (defined out of line below).
6044 static const char*
const INDENT;
6046 enum COLLECTION_TYPE
6048 COLLECTION_TYPE_OBJECT,
6049 COLLECTION_TYPE_ARRAY,
// StackItem: one open collection — its kind, how many values were written,
// and whether it is rendered on a single line.
6053 COLLECTION_TYPE type;
6054 uint32_t valueCount;
6055 bool singleLineMode;
6058 VmaStringBuilder& m_SB;
6059 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6060 bool m_InsideString;
6062 void BeginValue(
bool isString);
6063 void WriteIndent(
bool oneLess =
false);
// Out-of-line definition of the indentation unit.
6066 const char*
const VmaJsonWriter::INDENT =
" ";
// ---- VmaJsonWriter implementation ----
// NOTE(review): extraction dropped braces, several statements (e.g. the
// m_SB(sb) initializer, m_SB.Add('{'/'}') calls, the escaping switch in
// ContinueString, and the AddNumber calls) — confirm each body upstream.
// Constructor: starts with an empty collection stack, outside any string.
6068 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6070 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6071 m_InsideString(false)
// Destructor: all strings and collections must have been closed.
6075 VmaJsonWriter::~VmaJsonWriter()
6077 VMA_ASSERT(!m_InsideString);
6078 VMA_ASSERT(m_Stack.empty());
// BeginObject: pushes a new object frame onto the stack.
6081 void VmaJsonWriter::BeginObject(
bool singleLine)
6083 VMA_ASSERT(!m_InsideString);
6089 item.type = COLLECTION_TYPE_OBJECT;
6090 item.valueCount = 0;
6091 item.singleLineMode = singleLine;
6092 m_Stack.push_back(item);
// EndObject: pops the frame; the top of stack must be an object.
6095 void VmaJsonWriter::EndObject()
6097 VMA_ASSERT(!m_InsideString);
6102 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// BeginArray / EndArray: same protocol for arrays.
6106 void VmaJsonWriter::BeginArray(
bool singleLine)
6108 VMA_ASSERT(!m_InsideString);
6114 item.type = COLLECTION_TYPE_ARRAY;
6115 item.valueCount = 0;
6116 item.singleLineMode = singleLine;
6117 m_Stack.push_back(item);
6120 void VmaJsonWriter::EndArray()
6122 VMA_ASSERT(!m_InsideString);
6127 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// WriteString: convenience Begin+End (body not visible in this extract).
6131 void VmaJsonWriter::WriteString(
const char* pStr)
// BeginString: opens a quoted string value; optional initial content.
6137 void VmaJsonWriter::BeginString(
const char* pStr)
6139 VMA_ASSERT(!m_InsideString);
6143 m_InsideString =
true;
6144 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6146 ContinueString(pStr);
// ContinueString(const char*): appends characters with JSON escaping; the
// per-character switch was dropped by extraction — only the unsupported-
// character assertion remains visible.
6150 void VmaJsonWriter::ContinueString(
const char* pStr)
6152 VMA_ASSERT(m_InsideString);
6154 const size_t strLen = strlen(pStr);
6155 for(
size_t i = 0; i < strLen; ++i)
6188 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric/pointer continuations — only valid while inside a string.
6194 void VmaJsonWriter::ContinueString(uint32_t n)
6196 VMA_ASSERT(m_InsideString);
6200 void VmaJsonWriter::ContinueString(uint64_t n)
6202 VMA_ASSERT(m_InsideString);
6206 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6208 VMA_ASSERT(m_InsideString);
6209 m_SB.AddPointer(ptr);
// EndString: optional final content, then closes the quoted string.
6212 void VmaJsonWriter::EndString(
const char* pStr)
6214 VMA_ASSERT(m_InsideString);
6215 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6217 ContinueString(pStr);
6220 m_InsideString =
false;
// Bare-value writers — only valid outside a string.
6223 void VmaJsonWriter::WriteNumber(uint32_t n)
6225 VMA_ASSERT(!m_InsideString);
6230 void VmaJsonWriter::WriteNumber(uint64_t n)
6232 VMA_ASSERT(!m_InsideString);
6237 void VmaJsonWriter::WriteBool(
bool b)
6239 VMA_ASSERT(!m_InsideString);
6241 m_SB.Add(b ?
"true" :
"false");
6244 void VmaJsonWriter::WriteNull()
6246 VMA_ASSERT(!m_InsideString);
// BeginValue: bookkeeping before each value — inside an object, even value
// indices must be (string) keys; emits ':' after keys and ',' between values.
6251 void VmaJsonWriter::BeginValue(
bool isString)
6253 if(!m_Stack.empty())
6255 StackItem& currItem = m_Stack.back();
6256 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6257 currItem.valueCount % 2 == 0)
6259 VMA_ASSERT(isString);
6262 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6263 currItem.valueCount % 2 != 0)
6267 else if(currItem.valueCount > 0)
6276 ++currItem.valueCount;
// WriteIndent: newline plus one INDENT per open (non-single-line) collection;
// oneLess is used when emitting a closing bracket.
6280 void VmaJsonWriter::WriteIndent(
bool oneLess)
6282 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6286 size_t count = m_Stack.size();
6287 if(count > 0 && oneLess)
6291 for(
size_t i = 0; i < count; ++i)
6298 #endif // #if VMA_STATS_STRING_ENABLED 6302 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6304 if(IsUserDataString())
6306 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6308 FreeUserDataString(hAllocator);
6310 if(pUserData != VMA_NULL)
6312 const char*
const newStrSrc = (
char*)pUserData;
6313 const size_t newStrLen = strlen(newStrSrc);
6314 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6315 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6316 m_pUserData = newStrDst;
6321 m_pUserData = pUserData;
// ChangeBlockAllocation: rebinds a block allocation to a new block/offset,
// moving an active mapping from the old block to the new one.
// NOTE(review): the `++mapRefCount` for persistent maps and the
// `if(mapRefCount != 0)` guard are not visible in this extract.
6325 void VmaAllocation_T::ChangeBlockAllocation(
6327 VmaDeviceMemoryBlock* block,
6328 VkDeviceSize offset)
6330 VMA_ASSERT(block != VMA_NULL);
6331 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6334 if(block != m_BlockAllocation.m_Block)
6336 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6337 if(IsPersistentMap())
6339 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6340 block->Map(hAllocator, mapRefCount, VMA_NULL);
6343 m_BlockAllocation.m_Block = block;
6344 m_BlockAllocation.m_Offset = offset;
// Accessors below dispatch on m_Type (switch statements partially dropped by
// extraction). Block allocations delegate to the owning block; dedicated
// allocations read their own fields.
6347 VkDeviceSize VmaAllocation_T::GetOffset()
const 6351 case ALLOCATION_TYPE_BLOCK:
6352 return m_BlockAllocation.m_Offset;
6353 case ALLOCATION_TYPE_DEDICATED:
6361 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6365 case ALLOCATION_TYPE_BLOCK:
6366 return m_BlockAllocation.m_Block->GetDeviceMemory();
6367 case ALLOCATION_TYPE_DEDICATED:
6368 return m_DedicatedAllocation.m_hMemory;
6371 return VK_NULL_HANDLE;
6375 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6379 case ALLOCATION_TYPE_BLOCK:
6380 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6381 case ALLOCATION_TYPE_DEDICATED:
6382 return m_DedicatedAllocation.m_MemoryTypeIndex;
// GetMappedData: for block allocations, the pointer is the block's mapped
// base plus this allocation's offset.
6389 void* VmaAllocation_T::GetMappedData()
const 6393 case ALLOCATION_TYPE_BLOCK:
6396 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6397 VMA_ASSERT(pBlockData != VMA_NULL);
6398 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6405 case ALLOCATION_TYPE_DEDICATED:
6406 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6407 return m_DedicatedAllocation.m_pMappedData;
6414 bool VmaAllocation_T::CanBecomeLost()
const 6418 case ALLOCATION_TYPE_BLOCK:
6419 return m_BlockAllocation.m_CanBecomeLost;
6420 case ALLOCATION_TYPE_DEDICATED:
// GetPool: only block allocations belong to a pool.
6428 VmaPool VmaAllocation_T::GetPool()
const 6430 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6431 return m_BlockAllocation.m_hPool;
// MakeLost: CAS loop marking the allocation lost when its last-use frame is
// older than currentFrameIndex - frameInUseCount (loop/returns partially
// dropped by extraction).
6434 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6436 VMA_ASSERT(CanBecomeLost());
6442 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6445 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6450 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6456 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for suballocation types (array contents dropped by
// extraction), followed by PrintParameters: emits this allocation's fields
// as JSON key/value pairs into the given writer.
6466 #if VMA_STATS_STRING_ENABLED 6469 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
6478 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6480 json.WriteString(
"Type");
6481 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6483 json.WriteString(
"Size");
6484 json.WriteNumber(m_Size);
// User data: emitted as a string copy or as the raw pointer value.
6486 if(m_pUserData != VMA_NULL)
6488 json.WriteString(
"UserData");
6489 if(IsUserDataString())
6491 json.WriteString((
const char*)m_pUserData);
6496 json.ContinueString_Pointer(m_pUserData);
6501 json.WriteString(
"CreationFrameIndex");
6502 json.WriteNumber(m_CreationFrameIndex);
6504 json.WriteString(
"LastUseFrameIndex");
6505 json.WriteNumber(GetLastUseFrameIndex());
// Buffer/image usage flags are only printed when non-zero.
6507 if(m_BufferImageUsage != 0)
6509 json.WriteString(
"Usage");
6510 json.WriteNumber(m_BufferImageUsage);
6516 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6518 VMA_ASSERT(IsUserDataString());
6519 if(m_pUserData != VMA_NULL)
6521 char*
const oldStr = (
char*)m_pUserData;
6522 const size_t oldStrLen = strlen(oldStr);
6523 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6524 m_pUserData = VMA_NULL;
// Map/unmap reference counting. The low 7 bits of m_MapCount hold the map
// reference count; MAP_COUNT_FLAG_PERSISTENT_MAP marks a persistent mapping.
// NOTE(review): the ++/-- of m_MapCount and several call arguments are not
// visible in this extract — confirm upstream.
// BlockAllocMap: bumps the map refcount for a block allocation (max 0x7F).
6528 void VmaAllocation_T::BlockAllocMap()
6530 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6532 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6538 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// BlockAllocUnmap: decrements the refcount; asserts on unbalanced unmap.
6542 void VmaAllocation_T::BlockAllocUnmap()
6544 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6546 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6552 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap: returns the cached pointer when already mapped,
// otherwise calls vkMapMemory on the dedicated VkDeviceMemory and caches the
// result. Fails with VK_ERROR_MEMORY_MAP_FAILED on refcount overflow.
6556 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6558 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6562 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6564 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6565 *ppData = m_DedicatedAllocation.m_pMappedData;
6571 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6572 return VK_ERROR_MEMORY_MAP_FAILED;
6577 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6578 hAllocator->m_hDevice,
6579 m_DedicatedAllocation.m_hMemory,
6584 if(result == VK_SUCCESS)
6586 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: on last reference, clears the cached pointer and
// calls vkUnmapMemory.
6593 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6595 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6597 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6602 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6603 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6604 hAllocator->m_hDevice,
6605 m_DedicatedAllocation.m_hMemory);
6610 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// VmaPrintStatInfo: serializes one VmaStatInfo as a JSON object. The
// WriteNumber arguments for each key were dropped by extraction.
6614 #if VMA_STATS_STRING_ENABLED 6616 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6620 json.WriteString(
"Blocks");
6623 json.WriteString(
"Allocations");
6626 json.WriteString(
"UnusedRanges");
6629 json.WriteString(
"UsedBytes");
6632 json.WriteString(
"UnusedBytes");
// Nested single-line object with min/avg/max allocation sizes.
6637 json.WriteString(
"AllocationSize");
6638 json.BeginObject(
true);
6639 json.WriteString(
"Min");
6641 json.WriteString(
"Avg");
6643 json.WriteString(
"Max");
// Nested single-line object with min/avg/max unused-range sizes.
6650 json.WriteString(
"UnusedRangeSize");
6651 json.BeginObject(
true);
6652 json.WriteString(
"Min");
6654 json.WriteString(
"Avg");
6656 json.WriteString(
"Max");
// VmaSuballocationItemSizeLess: comparator ordering free-suballocation
// iterators by suballocation size; the second overload compares against a
// plain size for binary search. (operator() headers dropped by extraction.)
6664 #endif // #if VMA_STATS_STRING_ENABLED 6666 struct VmaSuballocationItemSizeLess
6669 const VmaSuballocationList::iterator lhs,
6670 const VmaSuballocationList::iterator rhs)
const 6672 return lhs->size < rhs->size;
6675 const VmaSuballocationList::iterator lhs,
6676 VkDeviceSize rhsSize)
const 6678 return lhs->size < rhsSize;
// Base-class constructor: captures the allocator's CPU callbacks.
6686 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6688 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// PrintDetailedMap_Begin: writes the common per-block JSON header and opens
// the "Suballocations" array (BeginObject/BeginArray calls dropped).
6692 #if VMA_STATS_STRING_ENABLED 6694 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6695 VkDeviceSize unusedBytes,
6696 size_t allocationCount,
6697 size_t unusedRangeCount)
const 6701 json.WriteString(
"TotalBytes");
6702 json.WriteNumber(GetSize());
6704 json.WriteString(
"UnusedBytes");
6705 json.WriteNumber(unusedBytes);
6707 json.WriteString(
"Allocations");
6708 json.WriteNumber((uint64_t)allocationCount);
6710 json.WriteString(
"UnusedRanges");
6711 json.WriteNumber((uint64_t)unusedRangeCount);
6713 json.WriteString(
"Suballocations");
// PrintDetailedMap_Allocation: one single-line JSON object per allocation —
// offset plus the allocation's own parameters.
6717 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6718 VkDeviceSize offset,
6721 json.BeginObject(
true);
6723 json.WriteString(
"Offset");
6724 json.WriteNumber(offset);
6726 hAllocation->PrintParameters(json);
// PrintDetailedMap_UnusedRange: one single-line JSON object per free range.
6731 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6732 VkDeviceSize offset,
6733 VkDeviceSize size)
const 6735 json.BeginObject(
true);
6737 json.WriteString(
"Offset");
6738 json.WriteNumber(offset);
6740 json.WriteString(
"Type");
6741 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6743 json.WriteString(
"Size");
6744 json.WriteNumber(size);
// PrintDetailedMap_End: closes the array/object opened by _Begin (body not
// visible). Followed by the VmaBlockMetadata_Generic constructor, which
// creates empty suballocation containers using the allocator's callbacks
// (m_FreeCount/m_SumFreeSize initializers dropped by extraction).
6749 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6755 #endif // #if VMA_STATS_STRING_ENABLED 6760 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6761 VmaBlockMetadata(hAllocator),
6764 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6765 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6769 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Init: the whole block starts as one free suballocation covering [0, size),
// registered in the by-size vector (iterator decrement to last element is
// not visible in this extract).
6773 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6775 VmaBlockMetadata::Init(size);
6778 m_SumFreeSize = size;
6780 VmaSuballocation suballoc = {};
6781 suballoc.offset = 0;
6782 suballoc.size = size;
6783 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6784 suballoc.hAllocation = VK_NULL_HANDLE;
6786 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6787 m_Suballocations.push_back(suballoc);
6788 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6790 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: walks the suballocation list recomputing offset, free count and
// free size, checking invariants (no two adjacent free ranges, allocation
// back-pointers consistent, by-size vector sorted) against cached members.
6793 bool VmaBlockMetadata_Generic::Validate()
const 6795 VMA_VALIDATE(!m_Suballocations.empty());
6798 VkDeviceSize calculatedOffset = 0;
6800 uint32_t calculatedFreeCount = 0;
6802 VkDeviceSize calculatedSumFreeSize = 0;
6805 size_t freeSuballocationsToRegister = 0;
6807 bool prevFree =
false;
6809 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6810 suballocItem != m_Suballocations.cend();
6813 const VmaSuballocation& subAlloc = *suballocItem;
6816 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6818 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
6820 VMA_VALIDATE(!prevFree || !currFree);
6822 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6826 calculatedSumFreeSize += subAlloc.size;
6827 ++calculatedFreeCount;
6828 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6830 ++freeSuballocationsToRegister;
6834 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6838 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6839 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
6842 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6845 calculatedOffset += subAlloc.size;
6846 prevFree = currFree;
6851 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6853 VkDeviceSize lastSize = 0;
6854 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6856 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6859 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6861 VMA_VALIDATE(suballocItem->size >= lastSize);
6863 lastSize = suballocItem->size;
6867 VMA_VALIDATE(ValidateFreeSuballocationList());
6868 VMA_VALIDATE(calculatedOffset == GetSize());
6869 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6870 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// GetUnusedRangeSizeMax: by-size vector is sorted ascending, so the largest
// free range is its last element (the empty-vector return is not visible).
6875 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6877 if(!m_FreeSuballocationsBySize.empty())
6879 return m_FreeSuballocationsBySize.back()->size;
6887 bool VmaBlockMetadata_Generic::IsEmpty()
const 6889 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// ---- VmaBlockMetadata_Generic: statistics and allocation path ----
// NOTE(review): extraction dropped braces and many interior lines throughout
// this region (loop increments, early returns, several call arguments) —
// confirm each body upstream before relying on exact control flow.
// CalcAllocationStatInfo: accumulates per-block statistics into outInfo by
// walking the suballocation list.
6892 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6896 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6908 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6909 suballocItem != m_Suballocations.cend();
6912 const VmaSuballocation& suballoc = *suballocItem;
6913 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// AddPoolStats: folds this block's totals into running pool statistics.
6926 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6928 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6930 inoutStats.
size += GetSize();
// PrintDetailedMap: emits the block header then one JSON entry per
// suballocation (allocation or unused range).
6937 #if VMA_STATS_STRING_ENABLED 6939 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6941 PrintDetailedMap_Begin(json,
6943 m_Suballocations.size() - (size_t)m_FreeCount,
6947 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6948 suballocItem != m_Suballocations.cend();
6949 ++suballocItem, ++i)
6951 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6953 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6957 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6961 PrintDetailedMap_End(json);
// CreateAllocationRequest: core best-fit search. Without canMakeOtherLost it
// binary-searches the sorted free list for the first sufficiently large free
// range; with it, every suballocation (free or lose-able) is considered and
// the cheapest candidate is kept.
6964 #endif // #if VMA_STATS_STRING_ENABLED 6966 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6967 uint32_t currentFrameIndex,
6968 uint32_t frameInUseCount,
6969 VkDeviceSize bufferImageGranularity,
6970 VkDeviceSize allocSize,
6971 VkDeviceSize allocAlignment,
6973 VmaSuballocationType allocType,
6974 bool canMakeOtherLost,
6976 VmaAllocationRequest* pAllocationRequest)
6978 VMA_ASSERT(allocSize > 0);
6979 VMA_ASSERT(!upperAddress);
6980 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6981 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6982 VMA_HEAVY_ASSERT(Validate());
// Early out: not enough total free space and nothing may be made lost.
6985 if(canMakeOtherLost ==
false &&
6986 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6992 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6993 if(freeSuballocCount > 0)
// Best-fit: first free range >= allocSize (+ debug margins).
6998 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6999 m_FreeSuballocationsBySize.data(),
7000 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7001 allocSize + 2 * VMA_DEBUG_MARGIN,
7002 VmaSuballocationItemSizeLess());
7003 size_t index = it - m_FreeSuballocationsBySize.data();
7004 for(; index < freeSuballocCount; ++index)
7009 bufferImageGranularity,
7013 m_FreeSuballocationsBySize[index],
7015 &pAllocationRequest->offset,
7016 &pAllocationRequest->itemsToMakeLostCount,
7017 &pAllocationRequest->sumFreeSize,
7018 &pAllocationRequest->sumItemSize))
7020 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit fallback: scan free ranges from largest to smallest.
7028 for(
size_t index = freeSuballocCount; index--; )
7033 bufferImageGranularity,
7037 m_FreeSuballocationsBySize[index],
7039 &pAllocationRequest->offset,
7040 &pAllocationRequest->itemsToMakeLostCount,
7041 &pAllocationRequest->sumFreeSize,
7042 &pAllocationRequest->sumItemSize))
7044 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// canMakeOtherLost: brute-force search keeping the lowest-cost request.
7051 if(canMakeOtherLost)
7055 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7056 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7058 VmaAllocationRequest tmpAllocRequest = {};
7059 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7060 suballocIt != m_Suballocations.end();
7063 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7064 suballocIt->hAllocation->CanBecomeLost())
7069 bufferImageGranularity,
7075 &tmpAllocRequest.offset,
7076 &tmpAllocRequest.itemsToMakeLostCount,
7077 &tmpAllocRequest.sumFreeSize,
7078 &tmpAllocRequest.sumItemSize))
7080 tmpAllocRequest.item = suballocIt;
7082 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7085 *pAllocationRequest = tmpAllocRequest;
7091 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// MakeRequestedAllocationsLost: marks the allocations chosen by
// CreateAllocationRequest as lost so their space can be reused; the request's
// item must end up pointing at a free suballocation.
7100 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7101 uint32_t currentFrameIndex,
7102 uint32_t frameInUseCount,
7103 VmaAllocationRequest* pAllocationRequest)
7105 while(pAllocationRequest->itemsToMakeLostCount > 0)
7107 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7109 ++pAllocationRequest->item;
7111 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7112 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7113 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7114 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7116 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7117 --pAllocationRequest->itemsToMakeLostCount;
7125 VMA_HEAVY_ASSERT(Validate());
7126 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7127 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// MakeAllocationsLost: force-loses every lose-able allocation in the block;
// returns how many were lost.
7132 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7134 uint32_t lostAllocationCount = 0;
7135 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7136 it != m_Suballocations.end();
7139 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7140 it->hAllocation->CanBecomeLost() &&
7141 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7143 it = FreeSuballocation(it);
7144 ++lostAllocationCount;
7147 return lostAllocationCount;
// CheckCorruption: verifies the magic values written in the debug margins
// before and after every live allocation.
7150 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7152 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7153 it != m_Suballocations.end();
7156 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7158 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7160 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7161 return VK_ERROR_VALIDATION_FAILED_EXT;
7163 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7165 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7166 return VK_ERROR_VALIDATION_FAILED_EXT;
// Alloc: commits a previously computed allocation request — shrinks the free
// suballocation to the allocated range and inserts free padding ranges
// before/after it as needed, then updates the cached free counters.
7174 void VmaBlockMetadata_Generic::Alloc(
7175 const VmaAllocationRequest& request,
7176 VmaSuballocationType type,
7177 VkDeviceSize allocSize,
7181 VMA_ASSERT(!upperAddress);
7182 VMA_ASSERT(request.item != m_Suballocations.end());
7183 VmaSuballocation& suballoc = *request.item;
7185 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7187 VMA_ASSERT(request.offset >= suballoc.offset);
7188 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7189 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7190 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7194 UnregisterFreeSuballocation(request.item);
7196 suballoc.offset = request.offset;
7197 suballoc.size = allocSize;
7198 suballoc.type = type;
7199 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new registered free suballocation.
7204 VmaSuballocation paddingSuballoc = {};
7205 paddingSuballoc.offset = request.offset + allocSize;
7206 paddingSuballoc.size = paddingEnd;
7207 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7208 VmaSuballocationList::iterator next = request.item;
7210 const VmaSuballocationList::iterator paddingEndItem =
7211 m_Suballocations.insert(next, paddingSuballoc);
7212 RegisterFreeSuballocation(paddingEndItem);
// Leading padding likewise.
7218 VmaSuballocation paddingSuballoc = {};
7219 paddingSuballoc.offset = request.offset - paddingBegin;
7220 paddingSuballoc.size = paddingBegin;
7221 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7222 const VmaSuballocationList::iterator paddingBeginItem =
7223 m_Suballocations.insert(request.item, paddingSuballoc);
7224 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each padding range adds one back.
7228 m_FreeCount = m_FreeCount - 1;
7229 if(paddingBegin > 0)
7237 m_SumFreeSize -= allocSize;
// ---- VmaBlockMetadata_Generic: free path and fit checking ----
// NOTE(review): braces, loop increments and several branches were dropped by
// extraction in this region — confirm control flow upstream.
// Free: linear search for the suballocation holding `allocation`, then frees
// it; asserts if the allocation is not found in this block.
7240 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7242 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7243 suballocItem != m_Suballocations.end();
7246 VmaSuballocation& suballoc = *suballocItem;
7247 if(suballoc.hAllocation == allocation)
7249 FreeSuballocation(suballocItem);
7250 VMA_HEAVY_ASSERT(Validate());
7254 VMA_ASSERT(0 &&
"Not found!");
// FreeAtOffset: same, keyed by offset instead of allocation handle.
7257 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7259 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7260 suballocItem != m_Suballocations.end();
7263 VmaSuballocation& suballoc = *suballocItem;
7264 if(suballoc.offset == offset)
7266 FreeSuballocation(suballocItem);
7270 VMA_ASSERT(0 &&
"Not found!");
// ValidateFreeSuballocationList: the by-size vector must contain only free,
// registrable suballocations sorted by non-decreasing size.
7273 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7275 VkDeviceSize lastSize = 0;
7276 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7278 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7280 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7281 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7282 VMA_VALIDATE(it->size >= lastSize);
7283 lastSize = it->size;
// CheckAllocation: decides whether an allocation of allocSize/allocAlignment
// fits starting at suballocItem, honoring debug margins and
// bufferImageGranularity, optionally counting allocations that would need to
// be made lost. Outputs the chosen offset and cost bookkeeping.
7288 bool VmaBlockMetadata_Generic::CheckAllocation(
7289 uint32_t currentFrameIndex,
7290 uint32_t frameInUseCount,
7291 VkDeviceSize bufferImageGranularity,
7292 VkDeviceSize allocSize,
7293 VkDeviceSize allocAlignment,
7294 VmaSuballocationType allocType,
7295 VmaSuballocationList::const_iterator suballocItem,
7296 bool canMakeOtherLost,
7297 VkDeviceSize* pOffset,
7298 size_t* itemsToMakeLostCount,
7299 VkDeviceSize* pSumFreeSize,
7300 VkDeviceSize* pSumItemSize)
const 7302 VMA_ASSERT(allocSize > 0);
7303 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7304 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7305 VMA_ASSERT(pOffset != VMA_NULL);
7307 *itemsToMakeLostCount = 0;
// Branch 1: candidate may span multiple suballocations if others are lost.
7311 if(canMakeOtherLost)
7313 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7315 *pSumFreeSize = suballocItem->size;
7319 if(suballocItem->hAllocation->CanBecomeLost() &&
7320 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7322 ++*itemsToMakeLostCount;
7323 *pSumItemSize = suballocItem->size;
7332 if(GetSize() - suballocItem->offset < allocSize)
// Start offset = suballocation start, plus debug margin, aligned up.
7338 *pOffset = suballocItem->offset;
7341 if(VMA_DEBUG_MARGIN > 0)
7343 *pOffset += VMA_DEBUG_MARGIN;
7347 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against previous suballocations on the
// same page with a conflicting resource type.
7351 if(bufferImageGranularity > 1)
7353 bool bufferImageGranularityConflict =
false;
7354 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7355 while(prevSuballocItem != m_Suballocations.cbegin())
7358 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7359 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7361 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7363 bufferImageGranularityConflict =
true;
7371 if(bufferImageGranularityConflict)
7373 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7379 if(*pOffset >= suballocItem->offset + suballocItem->size)
7385 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7388 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7390 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7392 if(suballocItem->offset + totalSize > GetSize())
// Consume following suballocations (counting lose-able ones) until the
// required size is covered.
7399 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7400 if(totalSize > suballocItem->size)
7402 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7403 while(remainingSize > 0)
7406 if(lastSuballocItem == m_Suballocations.cend())
7410 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7412 *pSumFreeSize += lastSuballocItem->size;
7416 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7417 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7418 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7420 ++*itemsToMakeLostCount;
7421 *pSumItemSize += lastSuballocItem->size;
7428 remainingSize = (lastSuballocItem->size < remainingSize) ?
7429 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following suballocations; they may also
// need to be made lost.
7435 if(bufferImageGranularity > 1)
7437 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7439 while(nextSuballocItem != m_Suballocations.cend())
7441 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7442 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7444 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7446 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7447 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7448 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7450 ++*itemsToMakeLostCount;
// Branch 2: candidate must fit entirely inside this one free suballocation.
7469 const VmaSuballocation& suballoc = *suballocItem;
7470 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7472 *pSumFreeSize = suballoc.size;
7475 if(suballoc.size < allocSize)
7481 *pOffset = suballoc.offset;
7484 if(VMA_DEBUG_MARGIN > 0)
7486 *pOffset += VMA_DEBUG_MARGIN;
7490 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7494 if(bufferImageGranularity > 1)
7496 bool bufferImageGranularityConflict =
false;
7497 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7498 while(prevSuballocItem != m_Suballocations.cbegin())
7501 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7502 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7504 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7506 bufferImageGranularityConflict =
true;
7514 if(bufferImageGranularityConflict)
7516 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7521 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7524 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7527 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
7534 if(bufferImageGranularity > 1)
7536 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7538 while(nextSuballocItem != m_Suballocations.cend())
7540 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7541 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7543 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7562 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7564 VMA_ASSERT(item != m_Suballocations.end());
7565 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7567 VmaSuballocationList::iterator nextItem = item;
7569 VMA_ASSERT(nextItem != m_Suballocations.end());
7570 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7572 item->size += nextItem->size;
7574 m_Suballocations.erase(nextItem);
7577 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7580 VmaSuballocation& suballoc = *suballocItem;
7581 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7582 suballoc.hAllocation = VK_NULL_HANDLE;
7586 m_SumFreeSize += suballoc.size;
7589 bool mergeWithNext =
false;
7590 bool mergeWithPrev =
false;
7592 VmaSuballocationList::iterator nextItem = suballocItem;
7594 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7596 mergeWithNext =
true;
7599 VmaSuballocationList::iterator prevItem = suballocItem;
7600 if(suballocItem != m_Suballocations.begin())
7603 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7605 mergeWithPrev =
true;
7611 UnregisterFreeSuballocation(nextItem);
7612 MergeFreeWithNext(suballocItem);
7617 UnregisterFreeSuballocation(prevItem);
7618 MergeFreeWithNext(prevItem);
7619 RegisterFreeSuballocation(prevItem);
7624 RegisterFreeSuballocation(suballocItem);
7625 return suballocItem;
7629 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7631 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7632 VMA_ASSERT(item->size > 0);
7636 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7638 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7640 if(m_FreeSuballocationsBySize.empty())
7642 m_FreeSuballocationsBySize.push_back(item);
7646 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the size-sorted registry
// m_FreeSuballocationsBySize. Binary-searches to the first item of equal or
// greater size, then scans forward through the run of equal-sized items to
// find the exact iterator. Asserts if the item is not found.
// NOTE(review): lines elided by extraction (loop increment, else branch).
7654 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7656 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7657 VMA_ASSERT(item->size > 0);
7661 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only items of registrable size were ever inserted; skip the rest.
7663 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Locate the first entry whose size is not less than item's size.
7665 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7666 m_FreeSuballocationsBySize.data(),
7667 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7669 VmaSuballocationItemSizeLess());
// Walk the run of equal-sized entries looking for the exact iterator.
7670 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7671 index < m_FreeSuballocationsBySize.size();
7674 if(m_FreeSuballocationsBySize[index] == item)
7676 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item was never registered.
7679 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7681 VMA_ASSERT(0 &&
"Not found.");
// Constructor: starts with both suballocation vectors empty, vector 0 as the
// "1st" vector, and the 2nd vector unused (SECOND_VECTOR_EMPTY).
// NOTE(review): one initializer line (original line 7692, presumably
// m_SumFreeSize(0)) was dropped by extraction - confirm against full source.
7690 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7691 VmaBlockMetadata(hAllocator),
// Both backing vectors use the allocator's user-supplied allocation callbacks.
7693 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7694 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7695 m_1stVectorIndex(0),
7696 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
// Counters of null (freed-in-place) items used for lazy compaction.
7697 m_1stNullItemsBeginCount(0),
7698 m_1stNullItemsMiddleCount(0),
7699 m_2ndNullItemsCount(0)
// Destructor: nothing to release explicitly; members clean up themselves.
7703 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size; the whole block starts
// out free.
7707 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7709 VmaBlockMetadata::Init(size);
7710 m_SumFreeSize = size;
// Debug validation of the linear allocator's invariants: vector/mode
// consistency, null-item counters, monotonically increasing offsets with
// VMA_DEBUG_MARGIN gaps, and that m_SumFreeSize matches size minus the sum
// of used suballocations. Returns false (via VMA_VALIDATE) on any violation.
// NOTE(review): many lines (braces, null-item counting, final return) were
// dropped by extraction - embedded original line numbers jump.
7713 bool VmaBlockMetadata_Linear::Validate()
const 7715 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7716 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a 2nd-vector mode is active.
7718 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7719 VMA_VALIDATE(!suballocations1st.empty() ||
7720 suballocations2nd.empty() ||
7721 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7723 if(!suballocations1st.empty())
// First live item after the null prefix must be a real allocation,
// and the 1st vector must not end with a null item.
7726 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7728 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7730 if(!suballocations2nd.empty())
7733 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed the vectors they describe.
7736 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7737 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7739 VkDeviceSize sumUsedSize = 0;
7740 const size_t suballoc1stCount = suballocations1st.size();
7741 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the region before the 1st vector.
7743 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7745 const size_t suballoc2ndCount = suballocations2nd.size();
7746 size_t nullItem2ndCount = 0;
7747 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7749 const VmaSuballocation& suballoc = suballocations2nd[i];
7750 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free flag and null handle must agree; offsets strictly increase.
7752 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7753 VMA_VALIDATE(suballoc.offset >= offset);
// Live allocations must agree with their stored offset/size.
7757 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7758 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7759 sumUsedSize += suballoc.size;
7766 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7769 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// The null prefix of the 1st vector must consist of freed items only.
7772 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7774 const VmaSuballocation& suballoc = suballocations1st[i];
7775 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7776 suballoc.hAllocation == VK_NULL_HANDLE);
7779 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the live region of the 1st vector.
7781 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7783 const VmaSuballocation& suballoc = suballocations1st[i];
7784 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7786 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7787 VMA_VALIDATE(suballoc.offset >= offset);
7788 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7792 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7793 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7794 sumUsedSize += suballoc.size;
7801 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7803 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end of the block,
// so iterate it in reverse to keep offsets increasing.
7805 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7807 const size_t suballoc2ndCount = suballocations2nd.size();
7808 size_t nullItem2ndCount = 0;
7809 for(
size_t i = suballoc2ndCount; i--; )
7811 const VmaSuballocation& suballoc = suballocations2nd[i];
7812 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7814 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7815 VMA_VALIDATE(suballoc.offset >= offset);
7819 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7820 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7821 sumUsedSize += suballoc.size;
7828 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7831 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global accounting invariants.
7834 VMA_VALIDATE(offset <= GetSize());
7835 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total items in both vectors minus the
// lazily-freed (null) items tracked by the three counters.
7840 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7842 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7843 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
7846 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7848 const VkDeviceSize size = GetSize();
7860 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7862 switch(m_2ndVectorMode)
7864 case SECOND_VECTOR_EMPTY:
7870 const size_t suballocations1stCount = suballocations1st.size();
7871 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7872 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7873 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7875 firstSuballoc.offset,
7876 size - (lastSuballoc.offset + lastSuballoc.size));
7880 case SECOND_VECTOR_RING_BUFFER:
7885 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7886 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7887 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7888 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
7892 case SECOND_VECTOR_DOUBLE_STACK:
7897 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7898 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7899 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7900 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics (allocation count, used/unused byte and
// range counts) into outInfo by walking, in address order: the ring-buffer
// portion of the 2nd vector, then the 1st vector, then the double-stack
// portion of the 2nd vector (in reverse), recording each gap as an unused
// range. NOTE(review): the outInfo field updates themselves were elided by
// extraction - only the traversal skeleton remains visible here.
7910 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7912 const VkDeviceSize size = GetSize();
7913 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7914 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7915 const size_t suballoc1stCount = suballocations1st.size();
7916 const size_t suballoc2ndCount = suballocations2nd.size();
// Running end offset of the last processed item.
7927 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer items of the 2nd vector occupy [0, start of 1st).
7929 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7931 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7932 size_t nextAlloc2ndIndex = 0;
7933 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
7936 while(nextAlloc2ndIndex < suballoc2ndCount &&
7937 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7939 ++nextAlloc2ndIndex;
7943 if(nextAlloc2ndIndex < suballoc2ndCount)
7945 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
7948 if(lastOffset < suballoc.offset)
7951 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7965 lastOffset = suballoc.offset + suballoc.size;
7966 ++nextAlloc2ndIndex;
// No more live 2nd-vector items: the remainder of the region is unused.
7972 if(lastOffset < freeSpace2ndTo1stEnd)
7974 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7982 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, up to the bottom of the 2nd stack (or block end).
7987 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7988 const VkDeviceSize freeSpace1stTo2ndEnd =
7989 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7990 while(lastOffset < freeSpace1stTo2ndEnd)
7993 while(nextAlloc1stIndex < suballoc1stCount &&
7994 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7996 ++nextAlloc1stIndex;
8000 if(nextAlloc1stIndex < suballoc1stCount)
8002 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8005 if(lastOffset < suballoc.offset)
8008 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8022 lastOffset = suballoc.offset + suballoc.size;
8023 ++nextAlloc1stIndex;
8029 if(lastOffset < freeSpace1stTo2ndEnd)
8031 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8039 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack items of the 2nd vector, iterated in reverse so the
// traversal stays in increasing address order up to the block end.
8043 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8045 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8046 while(lastOffset < size)
8049 while(nextAlloc2ndIndex != SIZE_MAX &&
8050 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8052 --nextAlloc2ndIndex;
8056 if(nextAlloc2ndIndex != SIZE_MAX)
8058 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8061 if(lastOffset < suballoc.offset)
8064 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8078 lastOffset = suballoc.offset + suballoc.size;
8079 --nextAlloc2ndIndex;
8085 if(lastOffset < size)
8087 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's usage to inoutStats (pool-level aggregation), using the
// same three-pass address-order traversal as CalcAllocationStatInfo:
// ring-buffer portion of the 2nd vector, then the 1st vector, then the
// double-stack portion of the 2nd vector in reverse.
// FIX: the ring-buffer pass previously initialized nextAlloc2ndIndex to
// m_1stNullItemsBeginCount - a counter that belongs to the 1st vector - so
// live 2nd-vector allocations could be skipped (and the index could start
// past the vector's end). The sibling scans in CalcAllocationStatInfo and
// PrintDetailedMap both start this index at 0; this pass now does too.
// NOTE(review): the inoutStats field updates were elided by extraction -
// only the traversal skeleton remains visible here.
8103 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8105 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8106 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8107 const VkDeviceSize size = GetSize();
8108 const size_t suballoc1stCount = suballocations1st.size();
8109 const size_t suballoc2ndCount = suballocations2nd.size();
8111 inoutStats.
size += size;
// Running end offset of the last processed item.
8113 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer items of the 2nd vector occupy [0, start of 1st).
8115 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8117 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8118 size_t nextAlloc2ndIndex = 0;
8119 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
8122 while(nextAlloc2ndIndex < suballoc2ndCount &&
8123 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8125 ++nextAlloc2ndIndex;
8129 if(nextAlloc2ndIndex < suballoc2ndCount)
8131 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8134 if(lastOffset < suballoc.offset)
8137 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8148 lastOffset = suballoc.offset + suballoc.size;
8149 ++nextAlloc2ndIndex;
// No more live 2nd-vector items: remainder of the region is unused.
8154 if(lastOffset < freeSpace2ndTo1stEnd)
8157 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8164 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, up to the bottom of the 2nd stack (or block end).
8169 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8170 const VkDeviceSize freeSpace1stTo2ndEnd =
8171 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8172 while(lastOffset < freeSpace1stTo2ndEnd)
8175 while(nextAlloc1stIndex < suballoc1stCount &&
8176 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8178 ++nextAlloc1stIndex;
8182 if(nextAlloc1stIndex < suballoc1stCount)
8184 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8187 if(lastOffset < suballoc.offset)
8190 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8201 lastOffset = suballoc.offset + suballoc.size;
8202 ++nextAlloc1stIndex;
8207 if(lastOffset < freeSpace1stTo2ndEnd)
8210 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8217 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack items of the 2nd vector, iterated in reverse so the
// traversal stays in increasing address order up to the block end.
8221 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8223 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8224 while(lastOffset < size)
8227 while(nextAlloc2ndIndex != SIZE_MAX &&
8228 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8230 --nextAlloc2ndIndex;
8234 if(nextAlloc2ndIndex != SIZE_MAX)
8236 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8239 if(lastOffset < suballoc.offset)
8242 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8253 lastOffset = suballoc.offset + suballoc.size;
8254 --nextAlloc2ndIndex;
8259 if(lastOffset < size)
8262 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this block. Two phases: a counting pass over
// all three regions (to compute alloc/unused-range counts and used bytes for
// PrintDetailedMap_Begin), then a second identical traversal that emits each
// allocation and unused range via the PrintDetailedMap_* helpers.
// NOTE(review): counter-increment lines in the first pass and several braces
// were elided by extraction - embedded original line numbers jump.
8275 #if VMA_STATS_STRING_ENABLED 8276 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8278 const VkDeviceSize size = GetSize();
8279 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8280 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8281 const size_t suballoc1stCount = suballocations1st.size();
8282 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count allocations, unused ranges, and used bytes.
8286 size_t unusedRangeCount = 0;
8287 VkDeviceSize usedBytes = 0;
8289 VkDeviceSize lastOffset = 0;
8291 size_t alloc2ndCount = 0;
// Count pass over the ring-buffer portion of the 2nd vector.
8292 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8294 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8295 size_t nextAlloc2ndIndex = 0;
8296 while(lastOffset < freeSpace2ndTo1stEnd)
8299 while(nextAlloc2ndIndex < suballoc2ndCount &&
8300 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8302 ++nextAlloc2ndIndex;
8306 if(nextAlloc2ndIndex < suballoc2ndCount)
8308 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8311 if(lastOffset < suballoc.offset)
8320 usedBytes += suballoc.size;
8323 lastOffset = suballoc.offset + suballoc.size;
8324 ++nextAlloc2ndIndex;
8329 if(lastOffset < freeSpace2ndTo1stEnd)
8336 lastOffset = freeSpace2ndTo1stEnd;
// Count pass over the 1st vector.
8341 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8342 size_t alloc1stCount = 0;
8343 const VkDeviceSize freeSpace1stTo2ndEnd =
8344 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8345 while(lastOffset < freeSpace1stTo2ndEnd)
8348 while(nextAlloc1stIndex < suballoc1stCount &&
8349 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8351 ++nextAlloc1stIndex;
8355 if(nextAlloc1stIndex < suballoc1stCount)
8357 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8360 if(lastOffset < suballoc.offset)
8369 usedBytes += suballoc.size;
8372 lastOffset = suballoc.offset + suballoc.size;
8373 ++nextAlloc1stIndex;
8378 if(lastOffset < size)
8385 lastOffset = freeSpace1stTo2ndEnd;
// Count pass over the double-stack portion of the 2nd vector (reverse).
8389 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8391 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8392 while(lastOffset < size)
8395 while(nextAlloc2ndIndex != SIZE_MAX &&
8396 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8398 --nextAlloc2ndIndex;
8402 if(nextAlloc2ndIndex != SIZE_MAX)
8404 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8407 if(lastOffset < suballoc.offset)
8416 usedBytes += suballoc.size;
8419 lastOffset = suballoc.offset + suballoc.size;
8420 --nextAlloc2ndIndex;
8425 if(lastOffset < size)
// SECOND PASS: emit JSON using the totals computed above.
8437 const VkDeviceSize unusedBytes = size - usedBytes;
8438 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// Emit pass over the ring-buffer portion of the 2nd vector.
8443 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8445 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8446 size_t nextAlloc2ndIndex = 0;
8447 while(lastOffset < freeSpace2ndTo1stEnd)
8450 while(nextAlloc2ndIndex < suballoc2ndCount &&
8451 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8453 ++nextAlloc2ndIndex;
8457 if(nextAlloc2ndIndex < suballoc2ndCount)
8459 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8462 if(lastOffset < suballoc.offset)
8465 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8466 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8471 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8474 lastOffset = suballoc.offset + suballoc.size;
8475 ++nextAlloc2ndIndex;
8480 if(lastOffset < freeSpace2ndTo1stEnd)
8483 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8484 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8488 lastOffset = freeSpace2ndTo1stEnd;
// Emit pass over the 1st vector.
8493 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8494 while(lastOffset < freeSpace1stTo2ndEnd)
8497 while(nextAlloc1stIndex < suballoc1stCount &&
8498 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8500 ++nextAlloc1stIndex;
8504 if(nextAlloc1stIndex < suballoc1stCount)
8506 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8509 if(lastOffset < suballoc.offset)
8512 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8513 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8518 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8521 lastOffset = suballoc.offset + suballoc.size;
8522 ++nextAlloc1stIndex;
8527 if(lastOffset < freeSpace1stTo2ndEnd)
8530 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8531 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8535 lastOffset = freeSpace1stTo2ndEnd;
// Emit pass over the double-stack portion of the 2nd vector (reverse).
8539 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8541 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8542 while(lastOffset < size)
8545 while(nextAlloc2ndIndex != SIZE_MAX &&
8546 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8548 --nextAlloc2ndIndex;
8552 if(nextAlloc2ndIndex != SIZE_MAX)
8554 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8557 if(lastOffset < suballoc.offset)
8560 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8561 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8566 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8569 lastOffset = suballoc.offset + suballoc.size;
8570 --nextAlloc2ndIndex;
8575 if(lastOffset < size)
8578 const VkDeviceSize unusedRangeSize = size - lastOffset;
8579 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8588 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment and
// fills *pAllocationRequest on success. Three strategies, depending on the
// (elided) upper-address flag and current 2nd-vector mode:
//   1. Allocate at the top of the block growing downward (double stack).
//   2. Allocate at the end of the 1st vector growing upward.
//   3. Allocate at the end of the 2nd vector (ring buffer wrap-around),
//      optionally making lost allocations to free space (canMakeOtherLost).
// bufferImageGranularity conflicts between neighboring buffer/image
// suballocations are resolved by extra alignment or rejection.
// NOTE(review): extraction dropped many lines here (the upper-address
// parameter, return statements, braces) - original line numbers jump.
8590 #endif // #if VMA_STATS_STRING_ENABLED 8592 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8593 uint32_t currentFrameIndex,
8594 uint32_t frameInUseCount,
8595 VkDeviceSize bufferImageGranularity,
8596 VkDeviceSize allocSize,
8597 VkDeviceSize allocAlignment,
8599 VmaSuballocationType allocType,
8600 bool canMakeOtherLost,
8602 VmaAllocationRequest* pAllocationRequest)
8604 VMA_ASSERT(allocSize > 0);
8605 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8606 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8607 VMA_HEAVY_ASSERT(Validate());
8609 const VkDeviceSize size = GetSize();
8610 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8611 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1 (upper address / double stack) is incompatible with ring-buffer use.
8615 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8617 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8622 if(allocSize > size)
// Start from the top of the block (or just below the current 2nd stack top).
8626 VkDeviceSize resultBaseOffset = size - allocSize;
8627 if(!suballocations2nd.empty())
8629 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8630 resultBaseOffset = lastSuballoc.offset - allocSize;
8631 if(allocSize > lastSuballoc.offset)
8638 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin below the 2nd stack.
8641 if(VMA_DEBUG_MARGIN > 0)
8643 if(resultOffset < VMA_DEBUG_MARGIN)
8647 resultOffset -= VMA_DEBUG_MARGIN;
// Downward growth: align DOWN toward lower addresses.
8651 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check buffer/image granularity conflicts with 2nd-stack neighbors above.
8655 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8657 bool bufferImageGranularityConflict =
false;
8658 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8660 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8661 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8663 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8665 bufferImageGranularityConflict =
true;
8673 if(bufferImageGranularityConflict)
8675 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity)
// There must be a margin-wide gap above the end of the 1st vector.
8680 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8681 suballocations1st.back().offset + suballocations1st.back().size :
8683 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against 1st-vector neighbors below.
8687 if(bufferImageGranularity > 1)
8689 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8691 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8692 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8694 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request (nothing needs to be made lost in this strategy).
8708 pAllocationRequest->offset = resultOffset;
8709 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8710 pAllocationRequest->sumItemSize = 0;
8712 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: append after the last item of the 1st vector (upward growth).
8718 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8722 VkDeviceSize resultBaseOffset = 0;
8723 if(!suballocations1st.empty())
8725 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8726 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8730 VkDeviceSize resultOffset = resultBaseOffset;
8733 if(VMA_DEBUG_MARGIN > 0)
8735 resultOffset += VMA_DEBUG_MARGIN;
8739 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflicts against previous 1st-vector items.
8743 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8745 bool bufferImageGranularityConflict =
false;
8746 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8748 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8749 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8751 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8753 bufferImageGranularityConflict =
true;
8761 if(bufferImageGranularityConflict)
8763 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack, or at the block end.
8767 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8768 suballocations2nd.back().offset : size;
8771 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check conflicts against 2nd-stack items above the candidate range.
8775 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8777 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8779 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8780 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8782 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8796 pAllocationRequest->offset = resultOffset;
8797 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8798 pAllocationRequest->sumItemSize = 0;
8800 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 3: wrap around as a ring buffer - allocate after the 2nd vector,
// before the start of the 1st, possibly making 1st-vector allocations lost.
8807 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8809 VMA_ASSERT(!suballocations1st.empty());
8811 VkDeviceSize resultBaseOffset = 0;
8812 if(!suballocations2nd.empty())
8814 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8815 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8819 VkDeviceSize resultOffset = resultBaseOffset;
8822 if(VMA_DEBUG_MARGIN > 0)
8824 resultOffset += VMA_DEBUG_MARGIN;
8828 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8832 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8834 bool bufferImageGranularityConflict =
false;
8835 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8837 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8838 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8840 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8842 bufferImageGranularityConflict =
true;
8850 if(bufferImageGranularityConflict)
8852 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8856 pAllocationRequest->itemsToMakeLostCount = 0;
8857 pAllocationRequest->sumItemSize = 0;
8858 size_t index1st = m_1stNullItemsBeginCount;
// If allowed, count how many overlapping 1st-vector allocations would need
// to be made lost to fit the candidate range.
8860 if(canMakeOtherLost)
8862 while(index1st < suballocations1st.size() &&
8863 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8866 const VmaSuballocation& suballoc = suballocations1st[index1st];
8867 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8873 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations that opted in and are stale enough can be lost.
8874 if(suballoc.hAllocation->CanBecomeLost() &&
8875 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8877 ++pAllocationRequest->itemsToMakeLostCount;
8878 pAllocationRequest->sumItemSize += suballoc.size;
// Granularity-conflicting items just past the range may need losing too.
8890 if(bufferImageGranularity > 1)
8892 while(index1st < suballocations1st.size())
8894 const VmaSuballocation& suballoc = suballocations1st[index1st];
8895 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8897 if(suballoc.hAllocation != VK_NULL_HANDLE)
8900 if(suballoc.hAllocation->CanBecomeLost() &&
8901 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8903 ++pAllocationRequest->itemsToMakeLostCount;
8904 pAllocationRequest->sumItemSize += suballoc.size;
// The range fits if it ends before the block end (all items consumed) or
// before the next surviving 1st-vector item.
8923 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8924 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity check against the remaining 1st-vector items above.
8928 if(bufferImageGranularity > 1)
8930 for(
size_t nextSuballocIndex = index1st;
8931 nextSuballocIndex < suballocations1st.size();
8932 nextSuballocIndex++)
8934 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8935 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8937 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8951 pAllocationRequest->offset = resultOffset;
8952 pAllocationRequest->sumFreeSize =
8953 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8955 - pAllocationRequest->sumItemSize;
// Makes lost exactly the allocations counted in
// pAllocationRequest->itemsToMakeLostCount (prepared by
// CreateAllocationRequest), scanning the 1st vector from the null prefix.
// Each successfully lost item becomes a FREE middle null item.
// NOTE(review): return statements, loop advance, and failure branch elided
// by extraction.
8965 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8966 uint32_t currentFrameIndex,
8967 uint32_t frameInUseCount,
8968 VmaAllocationRequest* pAllocationRequest)
// Nothing to do if the request needs no items lost.
8970 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing allocations only applies to the ring-buffer/empty configurations.
8975 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8977 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8978 size_t index1st = m_1stNullItemsBeginCount;
8979 size_t madeLostCount = 0;
8980 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8982 VMA_ASSERT(index1st < suballocations1st.size());
8983 VmaSuballocation& suballoc = suballocations1st[index1st];
8984 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8986 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8987 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8988 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost allocation into a free middle null item.
8990 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8991 suballoc.hAllocation = VK_NULL_HANDLE;
8992 m_SumFreeSize += suballoc.size;
8993 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in both vectors that can become lost and is
// stale relative to currentFrameIndex/frameInUseCount. Returns how many were
// lost. NOTE(review): a CleanupAfterFree()-style call after the loops
// appears to have been elided by extraction (line numbers jump at 9045).
9010 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9012 uint32_t lostAllocationCount = 0;
// Sweep the live region of the 1st vector.
9014 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9015 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9017 VmaSuballocation& suballoc = suballocations1st[i];
9018 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9019 suballoc.hAllocation->CanBecomeLost() &&
9020 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9022 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9023 suballoc.hAllocation = VK_NULL_HANDLE;
9024 ++m_1stNullItemsMiddleCount;
9025 m_SumFreeSize += suballoc.size;
9026 ++lostAllocationCount;
// Sweep the whole 2nd vector.
9030 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9031 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9033 VmaSuballocation& suballoc = suballocations2nd[i];
9034 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9035 suballoc.hAllocation->CanBecomeLost() &&
9036 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9038 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9039 suballoc.hAllocation = VK_NULL_HANDLE;
9040 ++m_2ndNullItemsCount;
9041 ++lostAllocationCount;
// If anything was lost, compact/cleanup (call elided by extraction).
9045 if(lostAllocationCount)
9050 return lostAllocationCount;
// Validates the magic-value guard bytes written VMA_DEBUG_MARGIN before and
// immediately after every live suballocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard; the final
// VK_SUCCESS return was elided by extraction.
9053 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
9055 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9056 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9058 const VmaSuballocation& suballoc = suballocations1st[i];
9059 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region before the allocation.
9061 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9063 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9064 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region after the allocation.
9066 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9068 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9069 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
9074 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9075 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9077 const VmaSuballocation& suballoc = suballocations2nd[i];
9078 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9080 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9082 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9083 return VK_ERROR_VALIDATION_FAILED_EXT;
9085 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9087 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9088 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits an allocation previously found by CreateAllocationRequest.
// Depending on where the request's offset falls it is appended to the 2nd
// vector as a new double-stack top, appended to the end of the 1st vector,
// or appended to the 2nd vector as a ring-buffer wrap-around item.
// NOTE(review): the upper-address branch condition, the hAllocation
// parameter line, and several braces/breaks were elided by extraction.
9096 void VmaBlockMetadata_Linear::Alloc(
9097 const VmaAllocationRequest& request,
9098 VmaSuballocationType type,
9099 VkDeviceSize allocSize,
9103 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address path: pushing onto the 2nd vector turns it into (or keeps
// it as) a double stack; ring-buffer mode is incompatible.
9107 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9108 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9109 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9110 suballocations2nd.push_back(newSuballoc);
9111 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9115 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block goes straight into the 1st vector.
9118 if(suballocations1st.empty())
9120 suballocations1st.push_back(newSuballoc);
// Offset past the current end of the 1st vector: append to the 1st vector.
9125 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9128 VMA_ASSERT(request.offset + allocSize <= GetSize());
9129 suballocations1st.push_back(newSuballoc);
// Offset below the start of the 1st vector: wrap around into the 2nd
// vector, entering (or continuing) ring-buffer mode.
9132 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9134 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9136 switch(m_2ndVectorMode)
9138 case SECOND_VECTOR_EMPTY:
// First wrap-around item switches the mode to ring buffer.
9140 VMA_ASSERT(suballocations2nd.empty());
9141 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9143 case SECOND_VECTOR_RING_BUFFER:
9145 VMA_ASSERT(!suballocations2nd.empty());
9147 case SECOND_VECTOR_DOUBLE_STACK:
9148 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9154 suballocations2nd.push_back(newSuballoc);
// A request offset matching neither region indicates internal corruption.
9158 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9163 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its stored offset.
9166 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9168 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts at the given offset. Fast paths handle
// the first live item of the 1st vector and the last item of whichever vector
// currently ends the used range; otherwise a binary search locates the item
// and marks it as a null (free) hole to be compacted later.
// NOTE(review): embedded numbering gaps (e.g. 9185 -> 9192) show elided lines
// (braces, CleanupAfterFree() calls, returns) — verify against upstream.
9171 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9173 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9174 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9176 if(!suballocations1st.empty())
// Fast path: freeing the first live item — just widen the leading null run.
9179 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9180 if(firstSuballoc.offset == offset)
9182 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9183 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9184 m_SumFreeSize += firstSuballoc.size;
9185 ++m_1stNullItemsBeginCount;
// Fast path: freeing the most recently pushed item of the 2nd vector.
9192 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9193 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9195 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9196 if(lastSuballoc.offset == offset)
9198 m_SumFreeSize += lastSuballoc.size;
9199 suballocations2nd.pop_back();
// Fast path: freeing the last item of the 1st vector when there is no 2nd.
9205 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9207 VmaSuballocation& lastSuballoc = suballocations1st.back();
9208 if(lastSuballoc.offset == offset)
9210 m_SumFreeSize += lastSuballoc.size;
9211 suballocations1st.pop_back();
// Slow path: binary search the 1st vector (sorted ascending by offset).
9219 VmaSuballocation refSuballoc;
9220 refSuballoc.offset = offset;
9222 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9223 suballocations1st.begin() + m_1stNullItemsBeginCount,
9224 suballocations1st.end(),
9226 if(it != suballocations1st.end())
// Mark the item as a free hole; actual removal happens during compaction.
9228 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9229 it->hAllocation = VK_NULL_HANDLE;
9230 ++m_1stNullItemsMiddleCount;
9231 m_SumFreeSize += it->size;
// Slow path: search the 2nd vector; sort order depends on its current mode
// (ascending for ring buffer, descending for double stack).
9237 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9240 VmaSuballocation refSuballoc;
9241 refSuballoc.offset = offset;
9243 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9244 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9245 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9246 if(it != suballocations2nd.end())
9248 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9249 it->hAllocation = VK_NULL_HANDLE;
9250 ++m_2ndNullItemsCount;
9251 m_SumFreeSize += it->size;
// Offset not found anywhere — the caller passed an invalid allocation.
9257 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9260 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9262 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9263 const size_t suballocCount = AccessSuballocations1st().size();
9264 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping run after a free: trims trailing/leading null items, optionally
// compacts the 1st vector, and when the 1st vector empties, promotes the 2nd
// (ring-buffer) vector to become the new 1st vector by flipping
// m_1stVectorIndex.
// NOTE(review): embedded numbering gaps indicate elided braces/conditions
// (e.g. the IsEmpty() branch header before 9274) — verify against upstream.
9267 void VmaBlockMetadata_Linear::CleanupAfterFree()
9269 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9270 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block free: reset everything to the pristine state.
9274 suballocations1st.clear();
9275 suballocations2nd.clear();
9276 m_1stNullItemsBeginCount = 0;
9277 m_1stNullItemsMiddleCount = 0;
9278 m_2ndNullItemsCount = 0;
9279 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9283 const size_t suballoc1stCount = suballocations1st.size();
9284 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9285 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run over any middle nulls that now touch it.
9288 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9289 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9291 ++m_1stNullItemsBeginCount;
9292 --m_1stNullItemsMiddleCount;
// Pop null items off the tail of the 1st vector.
9296 while(m_1stNullItemsMiddleCount > 0 &&
9297 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9299 --m_1stNullItemsMiddleCount;
9300 suballocations1st.pop_back();
// Pop null items off the tail of the 2nd vector.
9304 while(m_2ndNullItemsCount > 0 &&
9305 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9307 --m_2ndNullItemsCount;
9308 suballocations2nd.pop_back();
// Compact the 1st vector in place when the free/used ratio justifies it.
9311 if(ShouldCompact1st())
9313 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9314 size_t srcIndex = m_1stNullItemsBeginCount;
9315 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9317 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9321 if(dstIndex != srcIndex)
9323 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9327 suballocations1st.resize(nonNullItemCount);
9328 m_1stNullItemsBeginCount = 0;
9329 m_1stNullItemsMiddleCount = 0;
// 2nd vector emptied out: fall back to single-vector mode.
9333 if(suballocations2nd.empty())
9335 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left: clear it and, if the 2nd vector was a
// ring buffer, swap the roles of the two vectors.
9339 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9341 suballocations1st.clear();
9342 m_1stNullItemsBeginCount = 0;
9344 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9347 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9348 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9349 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9350 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9352 ++m_1stNullItemsBeginCount;
9353 --m_1stNullItemsMiddleCount;
9355 m_2ndNullItemsCount = 0;
// Swap which internal vector is considered "1st".
9356 m_1stVectorIndex ^= 1;
9361 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata constructor: zero-initializes counters and the
// per-level free lists. Real setup (tree root, level count) happens in Init().
// NOTE(review): numbering gap 9371 -> 9375 suggests additional member
// initializers were elided by extraction — verify against upstream.
9368 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9369 VmaBlockMetadata(hAllocator),
9371 m_AllocationCount(0),
// Empty all free lists across every level.
9375 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor — body elided by extraction (numbering jumps past 9380);
// presumably releases the buddy tree — TODO confirm against upstream.
9378 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
// Initializes the buddy allocator for a block of the given size. The usable
// size is rounded down to the previous power of two; the number of levels is
// derived from MIN_NODE_SIZE; a single free root node spanning the whole
// usable range seeds level 0.
// NOTE(review): the level-count loop increment is elided (gap 9393 -> 9398) —
// verify against upstream.
9383 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9385 VmaBlockMetadata::Init(size);
// Buddy allocation requires a power-of-two arena; the remainder is unusable.
9387 m_UsableSize = VmaPrevPow2(size);
9388 m_SumFreeSize = m_UsableSize;
9392 while(m_LevelCount < MAX_LEVELS &&
9393 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root node covering the entire usable size.
9398 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9399 rootNode->offset = 0;
9400 rootNode->type = Node::TYPE_FREE;
9401 rootNode->parent = VMA_NULL;
9402 rootNode->buddy = VMA_NULL;
9405 AddToFreeListFront(0, rootNode);
// Validates the entire buddy tree and all per-level free lists: node
// invariants via ValidateNode(), aggregate counters against recomputed
// totals, doubly-linked free-list integrity, and emptiness of lists beyond
// the configured level count.
9408 bool VmaBlockMetadata_Buddy::Validate()
const 9411 ValidationContext ctx;
9412 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9414 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Cached counters must match what the tree walk recomputed.
9416 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9417 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Check each level's free list: head has no predecessor, links are
// symmetric, every node is actually free, and the tail pointer is correct.
9420 for(uint32_t level = 0; level < m_LevelCount; ++level)
9422 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9423 m_FreeList[level].front->free.prev == VMA_NULL);
9425 for(Node* node = m_FreeList[level].front;
9427 node = node->free.next)
9429 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9431 if(node->free.next == VMA_NULL)
9433 VMA_VALIDATE(m_FreeList[level].back == node);
9437 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must stay completely unused.
9443 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9445 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node: the first non-empty free list,
// scanning from the coarsest (largest) level downward.
// NOTE(review): the fallback return for a fully allocated block is elided
// (gap 9457 -> 9463) — verify against upstream.
9451 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9453 for(uint32_t level = 0; level < m_LevelCount; ++level)
9455 if(m_FreeList[level].front != VMA_NULL)
9457 return LevelToNodeSize(level);
// Fills a VmaStatInfo for this block by recursing over the buddy tree, then
// accounts for the unusable tail (size minus power-of-two usable size) as an
// extra unused range.
// NOTE(review): initialization of outInfo and the unusable-size accounting
// statements are elided (gaps 9465 -> 9476, 9478 -> 9487) — verify upstream.
9463 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9465 const VkDeviceSize unusableSize = GetUnusableSize();
9476 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9478 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide statistics. The unusable
// tail beyond the power-of-two usable size is counted as unused space.
// NOTE(review): several accumulation statements are elided (gaps 9492 -> 9497
// and after 9497) — verify against upstream.
9487 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9489 const VkDeviceSize unusableSize = GetUnusableSize();
9491 inoutStats.
size += GetSize();
9492 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9497 if(unusableSize > 0)
// Writes a detailed JSON map of this block: summary statistics, then every
// node of the buddy tree, then the unusable tail as a final unused range.
// Only compiled when VMA_STATS_STRING_ENABLED is set.
9504 #if VMA_STATS_STRING_ENABLED 9506 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9510 CalcAllocationStatInfo(stat);
9512 PrintDetailedMap_Begin(
9518 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9520 const VkDeviceSize unusableSize = GetUnusableSize();
9521 if(unusableSize > 0)
9523 PrintDetailedMap_UnusedRange(json,
9528 PrintDetailedMap_End(json);
// Tries to find space for an allocation: sizes with granularity conflicts are
// padded up to bufferImageGranularity, then the target level is computed and
// the free lists are scanned from that level upward (toward larger nodes) for
// a node whose offset satisfies the alignment. The chosen level is smuggled
// through customData for Alloc() to use.
// NOTE(review): return statements and the lost-allocation path are elided
// (gaps after 9575) — verify against upstream.
9531 #endif // #if VMA_STATS_STRING_ENABLED 9533 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9534 uint32_t currentFrameIndex,
9535 uint32_t frameInUseCount,
9536 VkDeviceSize bufferImageGranularity,
9537 VkDeviceSize allocSize,
9538 VkDeviceSize allocAlignment,
9540 VmaSuballocationType allocType,
9541 bool canMakeOtherLost,
9543 VmaAllocationRequest* pAllocationRequest)
// The buddy algorithm only supports default (lower-address) placement.
9545 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Pad allocations of ambiguous type up to bufferImageGranularity to avoid
// aliasing conflicts between linear and optimal resources.
9549 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9550 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9551 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9553 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9554 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9557 if(allocSize > m_UsableSize)
// Scan levels from the best-fit level toward level 0 (larger nodes).
9562 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9563 for(uint32_t level = targetLevel + 1; level--; )
9565 for(Node* freeNode = m_FreeList[level].front;
9566 freeNode != VMA_NULL;
9567 freeNode = freeNode->free.next)
9569 if(freeNode->offset % allocAlignment == 0)
9571 pAllocationRequest->offset = freeNode->offset;
9572 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9573 pAllocationRequest->sumItemSize = 0;
9574 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node came from; Alloc() re-reads this.
9575 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never produces requests that require making other
// allocations lost, so this succeeds exactly when no items were requested
// to be lost.
9584 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9585 uint32_t currentFrameIndex,
9586 uint32_t frameInUseCount,
9587 VmaAllocationRequest* pAllocationRequest)
9593 return pAllocationRequest->itemsToMakeLostCount == 0;
// Body elided by extraction (numbering jumps past 9600); presumably a no-op
// returning 0 since buddy blocks do not support lost allocations — TODO
// confirm against upstream.
9596 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation request produced by CreateAllocationRequest(): finds
// the free node at the recorded level/offset, repeatedly splits it into buddy
// pairs until the node size matches the target level, then marks the final
// node as allocated and updates counters.
// NOTE(review): numbering gaps show elided braces and statements (e.g. the
// right-child link after 9648, loop tail after 9657) — verify upstream.
9605 void VmaBlockMetadata_Buddy::Alloc(
9606 const VmaAllocationRequest& request,
9607 VmaSuballocationType type,
9608 VkDeviceSize allocSize,
9612 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// The level chosen by CreateAllocationRequest() travels via customData.
9613 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the exact free node at the requested offset on that level.
9615 Node* currNode = m_FreeList[currLevel].front;
9616 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9617 while(currNode->offset != request.offset)
9619 currNode = currNode->free.next;
9620 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down the tree until the node is exactly the target size.
9624 while(currLevel < targetLevel)
9628 RemoveFromFreeList(currLevel, currNode);
9630 const uint32_t childrenLevel = currLevel + 1;
9633 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9634 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9636 leftChild->offset = currNode->offset;
9637 leftChild->type = Node::TYPE_FREE;
9638 leftChild->parent = currNode;
9639 leftChild->buddy = rightChild;
9641 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9642 rightChild->type = Node::TYPE_FREE;
9643 rightChild->parent = currNode;
9644 rightChild->buddy = leftChild;
// The current node becomes an interior split node.
9647 currNode->type = Node::TYPE_SPLIT;
9648 currNode->split.leftChild = leftChild;
// Push left in front of right so the left child is picked up next iteration.
9651 AddToFreeListFront(childrenLevel, rightChild);
9652 AddToFreeListFront(childrenLevel, leftChild);
9657 currNode = m_FreeList[currLevel].front;
// The node now matches the target level; claim it.
9666 VMA_ASSERT(currLevel == targetLevel &&
9667 currNode != VMA_NULL &&
9668 currNode->type == Node::TYPE_FREE);
9669 RemoveFromFreeList(currLevel, currNode);
9672 currNode->type = Node::TYPE_ALLOCATION;
9673 currNode->allocation.alloc = hAllocation;
9675 ++m_AllocationCount;
9677 m_SumFreeSize -= allocSize;
9680 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9682 if(node->type == Node::TYPE_SPLIT)
9684 DeleteNode(node->split.leftChild->buddy);
9685 DeleteNode(node->split.leftChild);
9688 vma_delete(GetAllocationCallbacks(), node);
// Recursive validator for one buddy-tree node: checks parent/buddy linkage,
// then per-type invariants — free nodes contribute to recomputed free size,
// allocation nodes to the allocation count, and split nodes recurse into both
// children with halved node size.
// NOTE(review): switch header, break statements and final return are elided
// (gaps around 9696 -> 9699 and after 9722) — verify against upstream.
9691 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9693 VMA_VALIDATE(level < m_LevelCount);
9694 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == NULL) has no buddy; buddy links are symmetric.
9695 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9696 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9699 case Node::TYPE_FREE:
9701 ctx.calculatedSumFreeSize += levelNodeSize;
9702 ++ctx.calculatedFreeCount;
9704 case Node::TYPE_ALLOCATION:
9705 ++ctx.calculatedAllocationCount;
// Internal fragmentation: node size minus the actual allocation size.
9706 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9707 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9709 case Node::TYPE_SPLIT:
9711 const uint32_t childrenLevel = level + 1;
9712 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9713 const Node*
const leftChild = curr->split.leftChild;
9714 VMA_VALIDATE(leftChild != VMA_NULL);
9715 VMA_VALIDATE(leftChild->offset == curr->offset);
9716 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9718 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9720 const Node*
const rightChild = leftChild->buddy;
9721 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9722 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9724 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest (smallest-node) level whose node
// size still fits it, by halving from the usable size downward.
// NOTE(review): the level counter's declaration/increment and the return are
// elided (gaps 9735 -> 9739 and after 9745) — verify against upstream.
9735 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9739 VkDeviceSize currLevelNodeSize = m_UsableSize;
9740 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9741 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9744 currLevelNodeSize = nextLevelNodeSize;
9745 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at the given offset: descends the buddy tree from the
// root following the offset, marks the leaf free, then repeatedly merges the
// node with its free buddy back into the parent while possible, finally
// re-inserting the merged node into its level's free list.
// NOTE(review): the level counter tracking during descent/merge and some
// loop-tail statements are elided (numbering gaps) — verify upstream.
9750 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9753 Node* node = m_Root;
9754 VkDeviceSize nodeOffset = 0;
9756 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: at each split, go left or right depending on which half contains
// the target offset.
9757 while(node->type == Node::TYPE_SPLIT)
9759 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9760 if(offset < nodeOffset + nextLevelSize)
9762 node = node->split.leftChild;
9766 node = node->split.leftChild->buddy;
9767 nodeOffset += nextLevelSize;
9770 levelNodeSize = nextLevelSize;
9773 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// alloc may be VK_NULL_HANDLE when called without a handle to cross-check.
9774 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9777 --m_AllocationCount;
9778 m_SumFreeSize += alloc->GetSize();
9780 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free, collapsing into the parent.
9783 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9785 RemoveFromFreeList(level, node->buddy);
9786 Node*
const parent = node->parent;
9788 vma_delete(GetAllocationCallbacks(), node->buddy);
9789 vma_delete(GetAllocationCallbacks(), node);
9790 parent->type = Node::TYPE_FREE;
// Re-insert the (possibly merged) free node at its final level.
9798 AddToFreeListFront(level, node);
// Recursive helper of CalcAllocationStatInfo(): accumulates statistics for
// one node — free nodes count as unused ranges, allocation nodes contribute
// their size plus any internal-fragmentation remainder, split nodes recurse
// into both children at half the node size.
// NOTE(review): switch header, accumulation statements and break lines are
// elided (numbering gaps) — verify against upstream.
9801 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9805 case Node::TYPE_FREE:
9811 case Node::TYPE_ALLOCATION:
9813 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Remainder of the node not covered by the allocation is unused.
9819 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9820 if(unusedRangeSize > 0)
9829 case Node::TYPE_SPLIT:
9831 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9832 const Node*
const leftChild = node->split.leftChild;
9833 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9834 const Node*
const rightChild = leftChild->buddy;
9835 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a free node onto the front of the doubly-linked free list for the
// given level, handling both the empty-list and non-empty cases.
// NOTE(review): the `else` keyword between the two branches is elided
// (numbering gap 9853 -> 9857) — verify against upstream.
9843 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9845 VMA_ASSERT(node->type == Node::TYPE_FREE);
9848 Node*
const frontNode = m_FreeList[level].front;
// Empty list: node becomes both head and tail.
9849 if(frontNode == VMA_NULL)
9851 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9852 node->free.prev = node->free.next = VMA_NULL;
9853 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty list: splice node in before the current head.
9857 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9858 node->free.prev = VMA_NULL;
9859 node->free.next = frontNode;
9860 frontNode->free.prev = node;
9861 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// patching the head/tail pointers or the neighbors' links as appropriate.
// NOTE(review): `else` keywords between the branches are elided (numbering
// gaps 9873 -> 9877 and 9886 -> 9890) — verify against upstream.
9865 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9867 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the predecessor side: node is the head, or has a real predecessor.
9870 if(node->free.prev == VMA_NULL)
9872 VMA_ASSERT(m_FreeList[level].front == node);
9873 m_FreeList[level].front = node->free.next;
9877 Node*
const prevFreeNode = node->free.prev;
9878 VMA_ASSERT(prevFreeNode->free.next == node);
9879 prevFreeNode->free.next = node->free.next;
// Fix the successor side: node is the tail, or has a real successor.
9883 if(node->free.next == VMA_NULL)
9885 VMA_ASSERT(m_FreeList[level].back == node);
9886 m_FreeList[level].back = node->free.prev;
9890 Node*
const nextFreeNode = node->free.next;
9891 VMA_ASSERT(nextFreeNode->free.prev == node);
9892 nextFreeNode->free.prev = node->free.prev;
// Recursive JSON printer for one buddy-tree node: free nodes print as unused
// ranges, allocation nodes print the allocation plus any internal-
// fragmentation remainder, split nodes recurse into both children.
// Only compiled when VMA_STATS_STRING_ENABLED is set.
// NOTE(review): switch header and break statements are elided — verify
// against upstream.
9896 #if VMA_STATS_STRING_ENABLED 9897 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9901 case Node::TYPE_FREE:
9902 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9904 case Node::TYPE_ALLOCATION:
9906 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9907 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Node is larger than the allocation: print the remainder as unused.
9908 if(allocSize < levelNodeSize)
9910 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
9914 case Node::TYPE_SPLIT:
9916 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9917 const Node*
const leftChild = node->split.leftChild;
9918 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9919 const Node*
const rightChild = leftChild->buddy;
9920 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: zero/null-initializes all members; real setup happens in
// Init() once the VkDeviceMemory has been allocated.
// NOTE(review): numbering gaps (9935 -> 9937 -> 9939) show additional member
// initializers were elided by extraction — verify against upstream.
9927 #endif // #if VMA_STATS_STRING_ENABLED 9933 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9934 m_pMetadata(VMA_NULL),
9935 m_MemoryTypeIndex(UINT32_MAX),
9937 m_hMemory(VK_NULL_HANDLE),
9939 m_pMappedData(VMA_NULL)
// Takes ownership of a freshly allocated VkDeviceMemory and creates the
// metadata object matching the requested algorithm (linear, buddy, or the
// default generic algorithm).
// NOTE(review): the algorithm switch/case headers are elided (numbering gaps
// around 9960/9963/9969) — verify against upstream.
9943 void VmaDeviceMemoryBlock::Init(
9945 uint32_t newMemoryTypeIndex,
9946 VkDeviceMemory newMemory,
9947 VkDeviceSize newSize,
// A block must not be initialized twice.
9951 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9953 m_MemoryTypeIndex = newMemoryTypeIndex;
9955 m_hMemory = newMemory;
9960 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9963 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default: the generic best-fit metadata implementation.
9969 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9971 m_pMetadata->Init(newSize);
// Releases this block's VkDeviceMemory back to the allocator and deletes the
// metadata object. All suballocations must have been freed beforehand.
9974 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9978 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9980 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9981 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
// Null the handle so double-destroy is caught by the assert above.
9982 m_hMemory = VK_NULL_HANDLE;
9984 vma_delete(allocator, m_pMetadata);
9985 m_pMetadata = VMA_NULL;
9988 bool VmaDeviceMemoryBlock::Validate()
const 9990 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
9991 (m_pMetadata->GetSize() != 0));
9993 return m_pMetadata->Validate();
// Maps the block, asks the metadata to verify the magic margins around every
// allocation, then unmaps. Returns the mapping error or the corruption-check
// result.
// NOTE(review): the `return res;` statements are elided (numbering gaps after
// 10000 and 10007) — verify against upstream.
9996 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9998 void* pData =
nullptr;
9999 VkResult res = Map(hAllocator, 1, &pData);
10000 if(res != VK_SUCCESS)
10005 res = m_pMetadata->CheckCorruption(pData);
// Balance the temporary Map above.
10007 Unmap(hAllocator, 1);
// Reference-counted mapping of the block's VkDeviceMemory. If already mapped,
// only the counter is bumped and the cached pointer returned; otherwise
// vkMapMemory is called once for the whole block.
// NOTE(review): the count==0 early-out, the vkMapMemory argument list
// (offset/size/flags/&m_pMappedData) and return statements are elided
// (numbering gaps) — verify against upstream.
10012 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// m_Mutex guards m_MapCount / m_pMappedData against concurrent Map/Unmap.
10019 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10020 if(m_MapCount != 0)
10022 m_MapCount += count;
10023 VMA_ASSERT(m_pMappedData != VMA_NULL);
10024 if(ppData != VMA_NULL)
10026 *ppData = m_pMappedData;
// First mapping: call into Vulkan.
10032 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10033 hAllocator->m_hDevice,
10039 if(result == VK_SUCCESS)
10041 if(ppData != VMA_NULL)
10043 *ppData = m_pMappedData;
10045 m_MapCount = count;
// Reference-counted unmapping: decrements the map counter and calls
// vkUnmapMemory only when it reaches zero. Unbalanced calls trigger an
// assert.
// NOTE(review): the count==0 early-out and `else` before the assert are
// elided (numbering gaps) — verify against upstream.
10051 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10058 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10059 if(m_MapCount >= count)
10061 m_MapCount -= count;
10062 if(m_MapCount == 0)
10064 m_pMappedData = VMA_NULL;
10065 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10070 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection support: maps the block and writes the magic value
// into the debug margins immediately before and after the allocation, then
// unmaps. Requires a configured VMA_DEBUG_MARGIN that is a multiple of 4.
// NOTE(review): the pData declaration and return statements are elided
// (numbering gaps around 10077 -> 10080) — verify against upstream.
10074 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10076 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10077 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10080 VkResult res = Map(hAllocator, 1, &pData);
10081 if(res != VK_SUCCESS)
// Stamp the margin before and the margin after the allocation.
10086 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10087 VmaWriteMagicValue(pData, allocOffset + allocSize);
10089 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: maps the block and verifies
// the magic values in the margins around an allocation being freed, asserting
// on any mismatch (heap underrun/overrun by the application).
// NOTE(review): the pData declaration and return statements are elided
// (numbering gaps) — verify against upstream.
10094 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10096 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10097 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10100 VkResult res = Map(hAllocator, 1, &pData);
10101 if(res != VK_SUCCESS)
10106 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10108 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10110 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10112 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10115 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's VkDeviceMemory at the allocation's offset.
// Serialized under m_Mutex because vkBindBufferMemory on the same memory must
// not race with map/unmap on some implementations.
// NOTE(review): the parameter list and two vkBindBufferMemory arguments are
// elided (numbering gaps 10120 -> 10125 and 10130 -> 10133) — verify upstream.
10120 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10125 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10126 hAllocation->GetBlock() ==
this);
10128 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10129 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10130 hAllocator->m_hDevice,
10133 hAllocation->GetOffset());
// Binds a VkImage to this block's VkDeviceMemory at the allocation's offset;
// mirrors BindBufferMemory including the mutex serialization.
// NOTE(review): the parameter list and two vkBindImageMemory arguments are
// elided (numbering gaps) — verify against upstream.
10136 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10141 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10142 hAllocation->GetBlock() ==
this);
10144 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10145 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10146 hAllocator->m_hDevice,
10149 hAllocation->GetOffset());
// Fragments of two static stat helpers. The memset belongs to a function
// whose signature was elided (presumably a stat-info initializer — TODO
// confirm against upstream); VmaPostprocessCalcStatInfo's body is elided.
10154 memset(&outInfo, 0,
sizeof(outInfo));
10173 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the VmaPoolCreateInfo fields into the embedded
// VmaBlockVector; a zero blockSize means "use the allocator's preferred
// block size". The destructor's body is elided from this view.
// NOTE(review): constructor parameters and several forwarded arguments are
// elided (numbering gaps) — verify against upstream.
10181 VmaPool_T::VmaPool_T(
10184 VkDeviceSize preferredBlockSize) :
10187 createInfo.memoryTypeIndex,
// Explicit block size wins over the heap-derived preferred size.
10188 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10189 createInfo.minBlockCount,
10190 createInfo.maxBlockCount,
10192 createInfo.frameInUseCount,
// A non-zero blockSize marks the block size as explicit (no auto-shrink).
10194 createInfo.blockSize != 0,
10200 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the configuration that governs how
// VkDeviceMemory blocks of one memory type are created, sized, and managed.
// NOTE(review): some parameters (e.g. the isCustomPool argument around
// original line 10216) and trailing initializers are elided — verify
// against upstream.
10204 #if VMA_STATS_STRING_ENABLED 10206 #endif // #if VMA_STATS_STRING_ENABLED 10208 VmaBlockVector::VmaBlockVector(
10210 uint32_t memoryTypeIndex,
10211 VkDeviceSize preferredBlockSize,
10212 size_t minBlockCount,
10213 size_t maxBlockCount,
10214 VkDeviceSize bufferImageGranularity,
10215 uint32_t frameInUseCount,
10217 bool explicitBlockSize,
10218 uint32_t algorithm) :
10219 m_hAllocator(hAllocator),
10220 m_MemoryTypeIndex(memoryTypeIndex),
10221 m_PreferredBlockSize(preferredBlockSize),
10222 m_MinBlockCount(minBlockCount),
10223 m_MaxBlockCount(maxBlockCount),
10224 m_BufferImageGranularity(bufferImageGranularity),
10225 m_FrameInUseCount(frameInUseCount),
10226 m_IsCustomPool(isCustomPool),
10227 m_ExplicitBlockSize(explicitBlockSize),
10228 m_Algorithm(algorithm),
10229 m_HasEmptyBlock(false),
// The block list allocates through the allocator's own callbacks.
10230 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10231 m_pDefragmentator(VMA_NULL),
// Destructor: destroys every owned VkDeviceMemory block (frees its Vulkan
// memory) and deletes the block objects. Defragmentation must have been
// ended before destruction.
10236 VmaBlockVector::~VmaBlockVector()
10238 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
// Iterate backwards so removal cost is irrelevant and indices stay valid.
10240 for(
size_t i = m_Blocks.size(); i--; )
10242 m_Blocks[i]->Destroy(m_hAllocator);
10243 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks at the preferred size, bailing out on
// the first failure.
// NOTE(review): the error return inside the loop and the final success
// return are elided (numbering gaps after 10252) — verify against upstream.
10247 VkResult VmaBlockVector::CreateMinBlocks()
10249 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10251 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10252 if(res != VK_SUCCESS)
// Aggregates statistics over all blocks of this vector into *pStats under the
// vector's mutex.
// NOTE(review): the pStats field initialization (size/unusedSize/counters)
// is elided (numbering gap 10264 -> 10273) — verify against upstream.
10260 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10262 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10264 const size_t blockCount = m_Blocks.size();
10273 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10275 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10276 VMA_ASSERT(pBlock);
10277 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each metadata object adds its own contribution to the pool stats.
10278 pBlock->m_pMetadata->AddPoolStats(*pStats);
10282 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10284 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10285 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10286 (VMA_DEBUG_MARGIN > 0) &&
10287 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries of the make-allocations-lost strategy in
// AllocatePage().
// Allocate(): loops AllocatePage() once per requested allocation under the
// vector's mutex; on any failure it frees the pages already produced and
// zeroes the output array so the caller never sees partial results.
// NOTE(review): several AllocatePage arguments and the final return are
// elided (numbering gaps) — verify against upstream.
10290 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
10292 VkResult VmaBlockVector::Allocate(
10294 uint32_t currentFrameIndex,
10296 VkDeviceSize alignment,
10298 VmaSuballocationType suballocType,
10299 size_t allocationCount,
10303 VkResult res = VK_SUCCESS;
// One lock covers the whole multi-page allocation for atomicity.
10306 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10307 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
10309 res = AllocatePage(
10316 pAllocations + allocIndex);
10317 if(res != VK_SUCCESS)
// Roll back: free everything allocated so far and clear the output.
10324 if(res != VK_SUCCESS)
10327 while(allocIndex--)
10329 Free(pAllocations[allocIndex]);
10331 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one page (suballocation). Strategy, in order: try the existing
// blocks (last block first, then forward or backward scan depending on
// strategy), then create a new block (shrinking its size up to
// NEW_BLOCK_SIZE_SHIFT_MAX times on OOM when the size is not explicit), and
// finally — if allowed — evict ("make lost") other allocations, retrying up
// to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): heavy elision throughout (argument lists of the
// AllocateFromBlock/CreateAllocationRequest calls, early returns, strategy
// condition headers) — verify details against upstream vk_mem_alloc.h.
10337 VkResult VmaBlockVector::AllocatePage(
10339 uint32_t currentFrameIndex,
10341 VkDeviceSize alignment,
10343 VmaSuballocationType suballocType,
// New blocks may only be created while below the configured maximum.
10350 const bool canCreateNewBlock =
10352 (m_Blocks.size() < m_MaxBlockCount);
10359 canMakeOtherLost =
false;
// Upper-address placement is only meaningful for the linear algorithm.
10363 if(isUpperAddress &&
10366 return VK_ERROR_FEATURE_NOT_PRESENT;
10380 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request that cannot fit in even a fresh preferred-size block (including
// both debug margins) can never succeed here.
10384 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10386 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10394 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Fast path: try the most recently used (last) block first.
10403 if(!m_Blocks.empty())
10405 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10406 VMA_ASSERT(pCurrBlock);
10407 VkResult res = AllocateFromBlock(
10418 if(res == VK_SUCCESS)
10420 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 2a. Forward scan over existing blocks (best-fit oriented strategy).
10430 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10432 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10433 VMA_ASSERT(pCurrBlock);
10434 VkResult res = AllocateFromBlock(
10445 if(res == VK_SUCCESS)
10447 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2b. Backward scan (worst-fit oriented strategy).
10455 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10457 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10458 VMA_ASSERT(pCurrBlock);
10459 VkResult res = AllocateFromBlock(
10470 if(res == VK_SUCCESS)
10472 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 3. No existing block fits: create a new one.
10480 if(canCreateNewBlock)
10483 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10484 uint32_t newBlockSizeShift = 0;
10485 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Without an explicit block size, start smaller when the existing blocks
// are small and the request allows it (1/2, 1/4, 1/8 of preferred).
10487 if(!m_ExplicitBlockSize)
10490 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10491 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10493 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10494 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10496 newBlockSize = smallerNewBlockSize;
10497 ++newBlockSizeShift;
10506 size_t newBlockIndex = 0;
10507 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device OOM, retry with progressively halved block sizes.
10509 if(!m_ExplicitBlockSize)
10511 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10513 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10514 if(smallerNewBlockSize >= size)
10516 newBlockSize = smallerNewBlockSize;
10517 ++newBlockSizeShift;
10518 res = CreateBlock(newBlockSize, &newBlockIndex);
10527 if(res == VK_SUCCESS)
10529 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10530 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10532 res = AllocateFromBlock(
10543 if(res == VK_SUCCESS)
10545 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation from a brand-new block must not fail for lack of space.
10551 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 4. Last resort: evict other allocations ("make lost") and retry.
10558 if(canMakeOtherLost)
10560 uint32_t tryIndex = 0;
10561 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10563 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10564 VmaAllocationRequest bestRequest = {};
10565 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan for the cheapest eviction plan.
10571 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10573 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10574 VMA_ASSERT(pCurrBlock);
10575 VmaAllocationRequest currRequest = {};
10576 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10579 m_BufferImageGranularity,
10588 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10589 if(pBestRequestBlock == VMA_NULL ||
10590 currRequestCost < bestRequestCost)
10592 pBestRequestBlock = pCurrBlock;
10593 bestRequest = currRequest;
10594 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be evicted — cannot do better.
10596 if(bestRequestCost == 0)
// Backward scan variant of the same search.
10607 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10609 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10610 VMA_ASSERT(pCurrBlock);
10611 VmaAllocationRequest currRequest = {};
10612 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10615 m_BufferImageGranularity,
10624 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10625 if(pBestRequestBlock == VMA_NULL ||
10626 currRequestCost < bestRequestCost ||
10629 pBestRequestBlock = pCurrBlock;
10630 bestRequest = currRequest;
10631 bestRequestCost = currRequestCost;
10633 if(bestRequestCost == 0 ||
10643 if(pBestRequestBlock != VMA_NULL)
// Persistently mapped allocations require the block mapped up front.
10647 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10648 if(res != VK_SUCCESS)
// Evict the planned victims; succeeds only if all could be made lost.
10654 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10660 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10662 m_HasEmptyBlock =
false;
// Commit the allocation into the chosen block.
10665 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10666 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10667 (*pAllocation)->InitBlockAllocation(
10670 bestRequest.offset,
10676 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10677 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10678 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10679 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10681 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10683 if(IsCorruptionDetectionEnabled())
10685 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10686 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted: other threads kept stealing the space.
10701 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10703 return VK_ERROR_TOO_MANY_OBJECTS;
10707 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block: validates corruption margins if
// enabled, unmaps a persistent mapping, frees the suballocation, and applies
// the empty-block policy — at most one fully empty block is kept alive (above
// m_MinBlockCount), any second empty block is destroyed outside the lock.
// NOTE(review): elided lines include the hAllocation parameter and some
// branch bodies (numbering gaps) — verify against upstream.
10710 void VmaBlockVector::Free(
10713 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope the lock so the expensive vkFreeMemory happens outside it.
10717 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10719 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10721 if(IsCorruptionDetectionEnabled())
10723 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10724 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the Map() done for persistently mapped allocations.
10727 if(hAllocation->IsPersistentMap())
10729 pBlock->Unmap(m_hAllocator, 1);
10732 pBlock->m_pMetadata->Free(hAllocation);
10733 VMA_HEAVY_ASSERT(pBlock->Validate());
10735 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Empty-block policy: keep at most one empty block as a cache.
10738 if(pBlock->m_pMetadata->IsEmpty())
10741 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10743 pBlockToDelete = pBlock;
10749 m_HasEmptyBlock =
true;
// This block is not empty, but an older empty one may now be redundant.
10754 else if(m_HasEmptyBlock)
10756 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10757 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10759 pBlockToDelete = pLastBlock;
10760 m_Blocks.pop_back();
10761 m_HasEmptyBlock =
false;
10765 IncrementallySortBlocks();
// Destroy the victim block outside the mutex (calls vkFreeMemory).
10770 if(pBlockToDelete != VMA_NULL)
10772 VMA_DEBUG_LOG(
" Deleted empty allocation");
10773 pBlockToDelete->Destroy(m_hAllocator);
10774 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, short-circuiting once a
// block at least as large as the preferred size is found.
// NOTE(review): the loop break and final return are elided (numbering gap
// after 10784) — verify against upstream.
10778 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10780 VkDeviceSize result = 0;
10781 for(
size_t i = m_Blocks.size(); i--; )
10783 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10784 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks by linear search.
// NOTE(review): extraction dropped the post-removal return and assert lines.
10792 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10794 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10796 if(m_Blocks[blockIndex] == pBlock)
10798 VmaVectorRemove(m_Blocks, blockIndex);
// One pass of bubble sort: swaps the first adjacent pair that is out of
// order by ascending sum of free size, nudging the vector toward sorted
// order cheaply on each call.
// NOTE(review): extraction dropped the early return after the swap.
10805 void VmaBlockVector::IncrementallySortBlocks()
10810 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10812 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10814 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to suballocate from a single block: builds an allocation request
// via the block's metadata, maps the block when the allocation must be
// mapped, records the suballocation, initializes *pAllocation, optionally
// fills debug pattern and writes corruption-detection magic values.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the request cannot be satisfied.
// NOTE(review): several parameter/argument lines and braces were dropped by
// extraction; code left byte-identical.
10821 VkResult VmaBlockVector::AllocateFromBlock(
10822 VmaDeviceMemoryBlock* pBlock,
10824 uint32_t currentFrameIndex,
10826 VkDeviceSize alignment,
10829 VmaSuballocationType suballocType,
10838 VmaAllocationRequest currRequest = {};
10839 if(pBlock->m_pMetadata->CreateAllocationRequest(
10842 m_BufferImageGranularity,
// canMakeOtherLost is false on this path, so no allocations may be lost.
10852 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
10856 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10857 if(res != VK_SUCCESS)
10864 if(pBlock->m_pMetadata->IsEmpty())
10866 m_HasEmptyBlock =
false;
10869 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10870 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10871 (*pAllocation)->InitBlockAllocation(
10874 currRequest.offset,
10880 VMA_HEAVY_ASSERT(pBlock->Validate());
10881 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10882 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10884 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10886 if(IsCorruptionDetectionEnabled())
10888 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10889 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10893 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports the new block's index through pNewBlockIndex.
// NOTE(review): the error-return after AllocateVulkanMemory and the block's
// Init(...) argument lines were dropped by extraction.
10896 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10898 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10899 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10900 allocInfo.allocationSize = blockSize;
10901 VkDeviceMemory mem = VK_NULL_HANDLE;
10902 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
10911 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10916 allocInfo.allocationSize,
10920 m_Blocks.push_back(pBlock);
10921 if(pNewBlockIndex != VMA_NULL)
10923 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as a JSON object (memory type, block size
// limits, counters, algorithm, and a per-block detailed map), taking m_Mutex
// for the duration. Compiled only when VMA_STATS_STRING_ENABLED.
// NOTE(review): several EndObject/else lines were dropped by extraction.
10929 #if VMA_STATS_STRING_ENABLED 10931 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
10933 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10935 json.BeginObject();
10939 json.WriteString(
"MemoryTypeIndex");
10940 json.WriteNumber(m_MemoryTypeIndex);
10942 json.WriteString(
"BlockSize");
10943 json.WriteNumber(m_PreferredBlockSize);
10945 json.WriteString(
"BlockCount");
10946 json.BeginObject(
true);
10947 if(m_MinBlockCount > 0)
10949 json.WriteString(
"Min");
10950 json.WriteNumber((uint64_t)m_MinBlockCount);
10952 if(m_MaxBlockCount < SIZE_MAX)
10954 json.WriteString(
"Max");
10955 json.WriteNumber((uint64_t)m_MaxBlockCount);
10957 json.WriteString(
"Cur");
10958 json.WriteNumber((uint64_t)m_Blocks.size());
10961 if(m_FrameInUseCount > 0)
10963 json.WriteString(
"FrameInUseCount");
10964 json.WriteNumber(m_FrameInUseCount);
10967 if(m_Algorithm != 0)
10969 json.WriteString(
"Algorithm");
10970 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
10975 json.WriteString(
"PreferredBlockSize");
10976 json.WriteNumber(m_PreferredBlockSize);
10979 json.WriteString(
"Blocks");
10980 json.BeginObject();
10981 for(
size_t i = 0; i < m_Blocks.size(); ++i)
10983 json.BeginString();
10984 json.ContinueString(m_Blocks[i]->GetId());
10987 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily creates and returns the per-vector VmaDefragmentator (created on
// first use with the current frame index).
10994 #endif // #if VMA_STATS_STRING_ENABLED 10996 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
10998 uint32_t currentFrameIndex)
11000 if(m_pDefragmentator == VMA_NULL)
11002 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
11005 currentFrameIndex);
11008 return m_pDefragmentator;
// Runs the defragmentator under m_Mutex, accumulates moved-bytes/allocation
// counts into pDefragmentationStats and decrements the remaining budgets
// (maxBytesToMove / maxAllocationsToMove), then destroys any blocks that
// became fully empty beyond m_MinBlockCount, crediting their size to
// bytesFreed. m_HasEmptyBlock is recomputed during the backward sweep.
// NOTE(review): braces/else lines were dropped by extraction.
11011 VkResult VmaBlockVector::Defragment(
11013 VkDeviceSize& maxBytesToMove,
11014 uint32_t& maxAllocationsToMove)
11016 if(m_pDefragmentator == VMA_NULL)
11021 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11024 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
11027 if(pDefragmentationStats != VMA_NULL)
11029 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
11030 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
11031 pDefragmentationStats->
bytesMoved += bytesMoved;
11033 VMA_ASSERT(bytesMoved <= maxBytesToMove);
11034 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
11035 maxBytesToMove -= bytesMoved;
11036 maxAllocationsToMove -= allocationsMoved;
11040 m_HasEmptyBlock =
false;
11041 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11043 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11044 if(pBlock->m_pMetadata->IsEmpty())
11046 if(m_Blocks.size() > m_MinBlockCount)
11048 if(pDefragmentationStats != VMA_NULL)
11051 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
11054 VmaVectorRemove(m_Blocks, blockIndex);
11055 pBlock->Destroy(m_hAllocator);
11056 vma_delete(m_hAllocator, pBlock);
11060 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator, if any, and resets the pointer.
11068 void VmaBlockVector::DestroyDefragmentator()
11070 if(m_pDefragmentator != VMA_NULL)
11072 vma_delete(m_hAllocator, m_pDefragmentator);
11073 m_pDefragmentator = VMA_NULL;
// Marks stale allocations in every block as lost (per currentFrameIndex and
// m_FrameInUseCount) under m_Mutex; reports the total count through
// pLostAllocationCount when it is non-null.
11077 void VmaBlockVector::MakePoolAllocationsLost(
11078 uint32_t currentFrameIndex,
11079 size_t* pLostAllocationCount)
11081 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11082 size_t lostAllocationCount = 0;
11083 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11085 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11086 VMA_ASSERT(pBlock);
11087 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
11089 if(pLostAllocationCount != VMA_NULL)
11091 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins of every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled;
// otherwise propagates the first non-success result from a block.
// NOTE(review): the propagating return and final VK_SUCCESS lines were
// dropped by extraction.
11095 VkResult VmaBlockVector::CheckCorruption()
11097 if(!IsCorruptionDetectionEnabled())
11099 return VK_ERROR_FEATURE_NOT_PRESENT;
11102 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11103 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11105 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11106 VMA_ASSERT(pBlock);
11107 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11108 if(res != VK_SUCCESS)
// Accumulates per-block statistics into pStats: each block's stat info is
// added to the global total, to this vector's memory type entry, and to the
// owning memory heap entry. Takes m_Mutex while iterating blocks.
11116 void VmaBlockVector::AddStats(
VmaStats* pStats)
11118 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11119 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11121 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11123 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11125 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11126 VMA_ASSERT(pBlock);
11127 VMA_HEAVY_ASSERT(pBlock->Validate());
11129 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11130 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11131 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11132 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and frame index;
// containers use the allocator's allocation callbacks. Only the default
// (generic) algorithm supports defragmentation, hence the assert.
11139 VmaDefragmentator::VmaDefragmentator(
11141 VmaBlockVector* pBlockVector,
11142 uint32_t currentFrameIndex) :
11143 m_hAllocator(hAllocator),
11144 m_pBlockVector(pBlockVector),
11145 m_CurrentFrameIndex(currentFrameIndex),
11147 m_AllocationsMoved(0),
11148 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11149 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11151 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: releases the BlockInfo objects owned by m_Blocks.
11154 VmaDefragmentator::~VmaDefragmentator()
11156 for(
size_t i = m_Blocks.size(); i--; )
11158 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate; pChanged (may be
// null) will be set to VK_TRUE if the allocation is later moved.
11162 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11164 AllocationInfo allocInfo;
11165 allocInfo.m_hAllocation = hAlloc;
11166 allocInfo.m_pChanged = pChanged;
11167 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory: reuses an existing mapping
// (either this defragmentator's own or the block's persistent one), mapping
// the block once otherwise. Ownership of the temporary mapping is tracked in
// m_pMappedDataForDefragmentation so Unmap() below can undo it.
// NOTE(review): VK_SUCCESS returns after each branch were dropped by
// extraction.
11170 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11173 if(m_pMappedDataForDefragmentation)
11175 *ppMappedData = m_pMappedDataForDefragmentation;
11180 if(m_pBlock->GetMappedData())
11182 *ppMappedData = m_pBlock->GetMappedData();
11187 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11188 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the mapping created by EnsureMapping(), if any; persistent
// mappings owned by the block itself are left untouched.
11192 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11194 if(m_pMappedDataForDefragmentation != VMA_NULL)
11196 m_pBlock->Unmap(hAllocator, 1);
// One round of the move loop: walks candidate allocations from the last
// block backward and tries to re-place each into an earlier block (or an
// earlier offset, per MoveMakesSense). Moves copy the bytes through mapped
// pointers, re-register the suballocation in the destination metadata, free
// the source, and update per-allocation change flags and global counters.
// Returns VK_INCOMPLETE when the byte/allocation budget would be exceeded.
// NOTE(review): loop framing, several argument lines, and brace lines were
// dropped by extraction; code left byte-identical.
11200 VkResult VmaDefragmentator::DefragmentRound(
11201 VkDeviceSize maxBytesToMove,
11202 uint32_t maxAllocationsToMove)
11204 if(m_Blocks.empty())
11209 size_t srcBlockIndex = m_Blocks.size() - 1;
11210 size_t srcAllocIndex = SIZE_MAX;
// Advance to the next source allocation, skipping exhausted blocks.
11216 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11218 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11221 if(srcBlockIndex == 0)
11228 srcAllocIndex = SIZE_MAX;
11233 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11237 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11238 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11240 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11241 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11242 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11243 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
11246 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11248 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11249 VmaAllocationRequest dstAllocRequest;
11250 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11251 m_CurrentFrameIndex,
11252 m_pBlockVector->GetFrameInUseCount(),
11253 m_pBlockVector->GetBufferImageGranularity(),
11260 &dstAllocRequest) &&
11262 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11264 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop before exceeding the caller-supplied budgets.
11267 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11268 (m_BytesMoved + size > maxBytesToMove))
11270 return VK_INCOMPLETE;
11273 void* pDstMappedData = VMA_NULL;
11274 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11275 if(res != VK_SUCCESS)
11280 void* pSrcMappedData = VMA_NULL;
11281 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11282 if(res != VK_SUCCESS)
11289 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11290 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11291 static_cast<size_t>(size));
11293 if(VMA_DEBUG_MARGIN > 0)
11295 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11296 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
11299 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11304 allocInfo.m_hAllocation);
11305 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11307 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11309 if(allocInfo.m_pChanged != VMA_NULL)
11311 *allocInfo.m_pChanged = VK_TRUE;
11314 ++m_AllocationsMoved;
11315 m_BytesMoved += size;
11317 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
11325 if(srcAllocIndex > 0)
11331 if(srcBlockIndex > 0)
11334 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation: builds BlockInfo entries for every block, sorts
// them by block pointer for binary lookup, distributes registered
// allocations into their owning blocks, sorts blocks/allocations into the
// preferred move order, then runs up to two DefragmentRound passes and
// unmaps everything that was mapped for the copy.
// NOTE(review): brace lines were dropped by extraction.
11344 VkResult VmaDefragmentator::Defragment(
11345 VkDeviceSize maxBytesToMove,
11346 uint32_t maxAllocationsToMove)
11348 if(m_Allocations.empty())
11354 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11355 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11357 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11358 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11359 m_Blocks.push_back(pBlockInfo);
11363 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
11366 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11368 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are simply not distributed (and thus not moved).
11370 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11372 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11373 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11374 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11376 (*it)->m_Allocations.push_back(allocInfo);
11384 m_Allocations.clear();
11386 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11388 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11389 pBlockInfo->CalcHasNonMovableAllocations();
11390 pBlockInfo->SortAllocationsBySizeDescecnding();
11394 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
11397 VkResult result = VK_SUCCESS;
11398 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11400 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
11404 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11406 m_Blocks[blockIndex]->Unmap(m_hAllocator);
11412 bool VmaDefragmentator::MoveMakesSense(
11413 size_t dstBlockIndex, VkDeviceSize dstOffset,
11414 size_t srcBlockIndex, VkDeviceSize srcOffset)
11416 if(dstBlockIndex < srcBlockIndex)
11420 if(dstBlockIndex > srcBlockIndex)
11424 if(dstOffset < srcOffset)
// VmaRecorder constructor plus (fragmented) Init: captures the QPC frequency
// and start counter, opens the CSV file named in settings.pFilePath with
// fopen_s ("wb"), and writes the two-line header identifying the format as
// version 1,3. Windows-only (compiled under VMA_RECORDING_ENABLED).
// NOTE(review): the Init(...) signature and several member-initializer lines
// were dropped by extraction; code left byte-identical.
11434 #if VMA_RECORDING_ENABLED 11436 VmaRecorder::VmaRecorder() :
11441 m_StartCounter(INT64_MAX)
11447 m_UseMutex = useMutex;
11448 m_Flags = settings.
flags;
11450 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11451 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
11454 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11457 return VK_ERROR_INITIALIZATION_FAILED;
11461 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11462 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the recording file if one was opened.
// NOTE(review): the fclose call was dropped by extraction.
11467 VmaRecorder::~VmaRecorder()
11469 if(m_File != VMA_NULL)
// Recorder methods, group 1: RecordCreateAllocator, RecordDestroyAllocator,
// RecordCreatePool (signature line lost by extraction), RecordDestroyPool,
// and RecordAllocateMemory. Each captures thread id + timestamp via
// GetBasicParams, takes m_FileMutex, and appends one CSV line to m_File.
// NOTE(review): function boundaries are damaged here; left byte-identical.
11475 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11477 CallParams callParams;
11478 GetBasicParams(callParams);
11480 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11481 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
11485 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11487 CallParams callParams;
11488 GetBasicParams(callParams);
11490 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11491 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool begins here; its signature line was lost by extraction.
11497 CallParams callParams;
11498 GetBasicParams(callParams);
11500 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11501 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
11512 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11514 CallParams callParams;
11515 GetBasicParams(callParams);
11517 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11518 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
11523 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11524 const VkMemoryRequirements& vkMemReq,
11528 CallParams callParams;
11529 GetBasicParams(callParams);
11531 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11532 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11533 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11535 vkMemReq.alignment,
11536 vkMemReq.memoryTypeBits,
11544 userDataStr.GetString());
// Recorder methods, group 2: RecordAllocateMemoryForBuffer/Image,
// RecordFreeMemory, RecordSetAllocationUserData, RecordCreateLostAllocation,
// RecordMapMemory, RecordUnmapMemory, RecordFlushAllocation, and
// RecordInvalidateAllocation. Same pattern throughout: GetBasicParams,
// m_FileMutex lock, one fprintf CSV line per call.
// NOTE(review): argument lines were dropped by extraction; left byte-identical.
11548 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11549 const VkMemoryRequirements& vkMemReq,
11550 bool requiresDedicatedAllocation,
11551 bool prefersDedicatedAllocation,
11555 CallParams callParams;
11556 GetBasicParams(callParams);
11558 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11559 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11560 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11562 vkMemReq.alignment,
11563 vkMemReq.memoryTypeBits,
11564 requiresDedicatedAllocation ? 1 : 0,
11565 prefersDedicatedAllocation ? 1 : 0,
11573 userDataStr.GetString());
11577 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11578 const VkMemoryRequirements& vkMemReq,
11579 bool requiresDedicatedAllocation,
11580 bool prefersDedicatedAllocation,
11584 CallParams callParams;
11585 GetBasicParams(callParams);
11587 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11588 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11589 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11591 vkMemReq.alignment,
11592 vkMemReq.memoryTypeBits,
11593 requiresDedicatedAllocation ? 1 : 0,
11594 prefersDedicatedAllocation ? 1 : 0,
11602 userDataStr.GetString());
11606 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11609 CallParams callParams;
11610 GetBasicParams(callParams);
11612 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11613 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11618 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11620 const void* pUserData)
11622 CallParams callParams;
11623 GetBasicParams(callParams);
11625 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11626 UserDataString userDataStr(
11629 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11631 userDataStr.GetString());
11635 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11638 CallParams callParams;
11639 GetBasicParams(callParams);
11641 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11642 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11647 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11650 CallParams callParams;
11651 GetBasicParams(callParams);
11653 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11654 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11659 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11662 CallParams callParams;
11663 GetBasicParams(callParams);
11665 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11666 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11671 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11672 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11674 CallParams callParams;
11675 GetBasicParams(callParams);
11677 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11678 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11685 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11686 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11688 CallParams callParams;
11689 GetBasicParams(callParams);
11691 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11692 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Recorder methods, group 3: RecordCreateBuffer, RecordCreateImage (full
// VkBufferCreateInfo / VkImageCreateInfo field dumps), RecordDestroyBuffer,
// RecordDestroyImage, RecordTouchAllocation, RecordGetAllocationInfo, and
// RecordMakePoolAllocationsLost. Same CSV-line pattern as the other groups.
// NOTE(review): some argument lines were dropped by extraction; code left
// byte-identical.
11699 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11700 const VkBufferCreateInfo& bufCreateInfo,
11704 CallParams callParams;
11705 GetBasicParams(callParams);
11707 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11708 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11709 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11710 bufCreateInfo.flags,
11711 bufCreateInfo.size,
11712 bufCreateInfo.usage,
11713 bufCreateInfo.sharingMode,
11714 allocCreateInfo.
flags,
11715 allocCreateInfo.
usage,
11719 allocCreateInfo.
pool,
11721 userDataStr.GetString());
11725 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11726 const VkImageCreateInfo& imageCreateInfo,
11730 CallParams callParams;
11731 GetBasicParams(callParams);
11733 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11734 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11735 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11736 imageCreateInfo.flags,
11737 imageCreateInfo.imageType,
11738 imageCreateInfo.format,
11739 imageCreateInfo.extent.width,
11740 imageCreateInfo.extent.height,
11741 imageCreateInfo.extent.depth,
11742 imageCreateInfo.mipLevels,
11743 imageCreateInfo.arrayLayers,
11744 imageCreateInfo.samples,
11745 imageCreateInfo.tiling,
11746 imageCreateInfo.usage,
11747 imageCreateInfo.sharingMode,
11748 imageCreateInfo.initialLayout,
11749 allocCreateInfo.
flags,
11750 allocCreateInfo.
usage,
11754 allocCreateInfo.
pool,
11756 userDataStr.GetString());
11760 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11763 CallParams callParams;
11764 GetBasicParams(callParams);
11766 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11767 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
11772 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11775 CallParams callParams;
11776 GetBasicParams(callParams);
11778 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11779 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
11784 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11787 CallParams callParams;
11788 GetBasicParams(callParams);
11790 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11791 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11796 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11799 CallParams callParams;
11800 GetBasicParams(callParams);
11802 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11803 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
11808 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11811 CallParams callParams;
11812 GetBasicParams(callParams);
11814 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11815 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of VmaRecorder::UserDataString's constructor (the signature and
// flag test were dropped by extraction): when user data is a string, point
// m_Str at it directly; otherwise format the raw pointer into m_PtrStr with
// sprintf_s ("%p").
11822 if(pUserData != VMA_NULL)
11826 m_Str = (
const char*)pUserData;
11830 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" section of the recording file:
// physical device identity and limits, every memory heap and type, whether
// VK_KHR_dedicated_allocation is enabled, and the values of the VMA debug
// macros this binary was compiled with.
11840 void VmaRecorder::WriteConfiguration(
11841 const VkPhysicalDeviceProperties& devProps,
11842 const VkPhysicalDeviceMemoryProperties& memProps,
11843 bool dedicatedAllocationExtensionEnabled)
11845 fprintf(m_File,
"Config,Begin\n");
11847 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11848 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11849 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11850 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11851 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11852 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11854 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11855 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11856 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
11858 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11859 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11861 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11862 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
11864 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11865 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11867 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11868 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11871 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
11873 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11874 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11875 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11876 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11877 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11878 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11879 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11880 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11881 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11883 fprintf(m_File,
"Config,End\n");
11886 void VmaRecorder::GetBasicParams(CallParams& outParams)
11888 outParams.threadId = GetCurrentThreadId();
11890 LARGE_INTEGER counter;
11891 QueryPerformanceCounter(&counter);
11892 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// VmaRecorder::Flush (body lost by extraction) followed by the fragmented
// VmaAllocator_T constructor: member initializers, validation asserts,
// zeroing of callback/property/block-vector tables, heap size limits,
// per-memory-type VmaBlockVector and dedicated-allocation vector creation,
// and optional VmaRecorder setup when pRecordSettings is used.
// NOTE(review): the constructor signature and many brace/argument lines were
// dropped by extraction; code left byte-identical.
11895 void VmaRecorder::Flush()
11903 #endif // #if VMA_RECORDING_ENABLED 11911 m_hDevice(pCreateInfo->device),
11912 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11913 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11914 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11915 m_PreferredLargeHeapBlockSize(0),
11916 m_PhysicalDevice(pCreateInfo->physicalDevice),
11917 m_CurrentFrameIndex(0),
11918 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11921 ,m_pRecorder(VMA_NULL)
// Corruption detection requires the debug margin to hold whole uint32 marks.
11924 if(VMA_DEBUG_DETECT_CORRUPTION)
11927 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
11932 #if !(VMA_DEDICATED_ALLOCATION) 11935 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
11939 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11940 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11941 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11943 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11944 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
11946 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11948 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
11959 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11960 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
11962 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11963 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11964 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11965 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// User-supplied heap size limits also clamp the reported heap sizes.
11972 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
11974 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
11975 if(limit != VK_WHOLE_SIZE)
11977 m_HeapSizeLimit[heapIndex] = limit;
11978 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
11980 m_MemProps.memoryHeaps[heapIndex].size = limit;
11986 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11988 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
11990 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11993 preferredBlockSize,
11996 GetBufferImageGranularity(),
12003 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
12010 VkResult res = VK_SUCCESS;
12015 #if VMA_RECORDING_ENABLED 12016 m_pRecorder = vma_new(
this, VmaRecorder)();
12018 if(res != VK_SUCCESS)
12022 m_pRecorder->WriteConfiguration(
12023 m_PhysicalDeviceProperties,
12025 m_UseKhrDedicatedAllocation);
12026 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
12028 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
12029 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records and deletes the recorder when recording is enabled,
// asserts all pools were destroyed, then frees the per-memory-type
// dedicated-allocation vectors and block vectors in reverse order.
12036 VmaAllocator_T::~VmaAllocator_T()
12038 #if VMA_RECORDING_ENABLED 12039 if(m_pRecorder != VMA_NULL)
12041 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
12042 vma_delete(
this, m_pRecorder);
12046 VMA_ASSERT(m_Pools.empty());
12048 for(
size_t i = GetMemoryTypeCount(); i--; )
12050 vma_delete(
this, m_pDedicatedAllocations[i]);
12051 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions in three stages:
//  1. If statically linked against Vulkan (VMA_STATIC_VULKAN_FUNCTIONS == 1),
//     take the addresses of the global Vulkan prototypes; the two KHR
//     dedicated-allocation entry points are fetched via vkGetDeviceProcAddr.
//  2. Overwrite with any non-null pointers the user supplied.
//  3. Assert every required pointer ended up non-null.
// NOTE(review): elided extract — braces/lines are missing; the leading
// numbers on each line are original-file line numbers.
12055 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
12057 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12058 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
12059 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
12060 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
12061 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
12062 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
12063 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
12064 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
12065 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
12066 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
12067 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
12068 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
12069 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
12070 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
12071 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
12072 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
12073 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR extension entry points have no static prototypes usable here, so they
// are resolved dynamically from the device.
12074 #if VMA_DEDICATED_ALLOCATION 12075 if(m_UseKhrDedicatedAllocation)
12077 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
12078 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
12079 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
12080 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Stage 2: user-supplied pointers win over the statically-linked defaults.
12082 #endif // #if VMA_DEDICATED_ALLOCATION 12083 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12085 #define VMA_COPY_IF_NOT_NULL(funcName) \ 12086 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 12088 if(pVulkanFunctions != VMA_NULL)
12090 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
12091 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
12092 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
12093 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
12094 VMA_COPY_IF_NOT_NULL(vkMapMemory);
12095 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
12096 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
12097 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
12098 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
12099 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
12100 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
12101 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
12102 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12103 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12104 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12105 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
12106 #if VMA_DEDICATED_ALLOCATION 12107 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
12108 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Stage 3: fail fast (in debug) if any required function is still missing.
12112 #undef VMA_COPY_IF_NOT_NULL 12116 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12117 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12118 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12119 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12120 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12121 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12122 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12123 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12124 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12125 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12126 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12127 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12128 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12129 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12130 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12131 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The 2KHR pointers are only mandatory when dedicated allocation is in use.
12132 #if VMA_DEDICATED_ALLOCATION 12133 if(m_UseKhrDedicatedAllocation)
12135 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12136 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12141 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12143 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12144 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12145 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12146 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates allocationCount allocations from a single, already-chosen memory
// type. Tries the default block vector first unless a dedicated allocation is
// preferred (forced by debug flag, requested by caller, or size larger than
// half the preferred block size); falls back to dedicated memory if the block
// vector fails.
// NOTE(review): elided extract — several original lines and braces are
// missing; the leading numbers on each line are original-file line numbers.
12149 VkResult VmaAllocator_T::AllocateMemoryOfType(
12151 VkDeviceSize alignment,
12152 bool dedicatedAllocation,
12153 VkBuffer dedicatedBuffer,
12154 VkImage dedicatedImage,
12156 uint32_t memTypeIndex,
12157 VmaSuballocationType suballocType,
12158 size_t allocationCount,
12161 VMA_ASSERT(pAllocations != VMA_NULL);
12162 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, vkMemReq.size);
// (Elided) Mapping-related flags are presumably stripped for non-HOST_VISIBLE
// types here — TODO confirm against the full source.
12168 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12173 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12174 VMA_ASSERT(blockVector);
12176 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests bigger than half a block go straight to dedicated memory.
12177 bool preferDedicatedMemory =
12178 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12179 dedicatedAllocation ||
12181 size > preferredBlockSize / 2;
// Dedicated path is only taken for the default pool (pool == VK_NULL_HANDLE).
12183 if(preferDedicatedMemory &&
12185 finalCreateInfo.
pool == VK_NULL_HANDLE)
12194 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12198 return AllocateDedicatedMemory(
// Default path: sub-allocate from the memory type's block vector.
12213 VkResult res = blockVector->Allocate(
12215 m_CurrentFrameIndex.load(),
12222 if(res == VK_SUCCESS)
12230 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block-vector allocation failed: retry as a dedicated allocation.
12234 res = AllocateDedicatedMemory(
12240 finalCreateInfo.pUserData,
12245 if(res == VK_SUCCESS)
12248 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12254 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount separate VkDeviceMemory objects (one per
// allocation). On success registers them in the sorted per-memory-type
// dedicated-allocations vector; on partial failure rolls back every page
// already created and zeroes the output array.
// NOTE(review): elided extract — braces and some lines are missing; leading
// numbers are original-file line numbers.
12261 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12263 VmaSuballocationType suballocType,
12264 uint32_t memTypeIndex,
12266 bool isUserDataString,
12268 VkBuffer dedicatedBuffer,
12269 VkImage dedicatedImage,
12270 size_t allocationCount,
12273 VMA_ASSERT(allocationCount > 0 && pAllocations);
12275 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12276 allocInfo.memoryTypeIndex = memTypeIndex;
12277 allocInfo.allocationSize = size;
// Chain VkMemoryDedicatedAllocateInfoKHR when the allocation is tied to a
// specific buffer or image (VK_KHR_dedicated_allocation).
12279 #if VMA_DEDICATED_ALLOCATION 12280 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12281 if(m_UseKhrDedicatedAllocation)
12283 if(dedicatedBuffer != VK_NULL_HANDLE)
12285 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12286 dedicatedAllocInfo.buffer = dedicatedBuffer;
12287 allocInfo.pNext = &dedicatedAllocInfo;
12289 else if(dedicatedImage != VK_NULL_HANDLE)
12291 dedicatedAllocInfo.image = dedicatedImage;
12292 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate each page; stop at the first failure.
12295 #endif // #if VMA_DEDICATED_ALLOCATION 12299 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12301 res = AllocateDedicatedMemoryPage(
12309 pAllocations + allocIndex);
12310 if(res != VK_SUCCESS)
// Success: publish all new allocations under the per-type mutex.
12316 if(res == VK_SUCCESS)
12320 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12321 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12322 VMA_ASSERT(pDedicatedAllocations);
12323 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12325 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
12329 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: free the pages created so far, in reverse order.
12334 while(allocIndex--)
12337 VkDeviceMemory hMemory = currAlloc->GetMemory();
12349 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
12351 currAlloc->SetUserData(
this, VMA_NULL);
12352 vma_delete(
this, currAlloc);
// Leave no dangling handles in the caller's array.
12355 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one VkDeviceMemory object for a dedicated allocation, optionally
// maps it persistently, wraps it in a VmaAllocation_T, and applies the debug
// fill pattern when enabled. Frees the Vulkan memory again if mapping fails.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12361 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
12363 VmaSuballocationType suballocType,
12364 uint32_t memTypeIndex,
12365 const VkMemoryAllocateInfo& allocInfo,
12367 bool isUserDataString,
12371 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12372 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12375 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistently map the whole range (elided condition presumably checks the
// "map" flag — TODO confirm against the full source).
12379 void* pMappedData = VMA_NULL;
12382 res = (*m_VulkanFunctions.vkMapMemory)(
12391 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the freshly allocated memory before returning.
12392 FreeVulkanMemory(memTypeIndex, size, hMemory);
12397 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12398 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12399 (*pAllocation)->SetUserData(
this, pUserData);
12400 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12402 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for hBuffer. When VK_KHR_dedicated_allocation
// is enabled, uses vkGetBufferMemoryRequirements2KHR and also reports whether
// the driver requires/prefers a dedicated allocation; otherwise falls back to
// the core function and reports false for both flags.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12408 void VmaAllocator_T::GetBufferMemoryRequirements(
12410 VkMemoryRequirements& memReq,
12411 bool& requiresDedicatedAllocation,
12412 bool& prefersDedicatedAllocation)
const 12414 #if VMA_DEDICATED_ALLOCATION 12415 if(m_UseKhrDedicatedAllocation)
12417 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12418 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated hints.
12420 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12422 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12423 memReq2.pNext = &memDedicatedReq;
12425 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12427 memReq = memReq2.memoryRequirements;
12428 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12429 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-Vulkan fallback: no dedicated-allocation information available.
12432 #endif // #if VMA_DEDICATED_ALLOCATION 12434 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12435 requiresDedicatedAllocation =
false;
12436 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: queries requirements for
// hImage via vkGetImageMemoryRequirements2KHR when the dedicated-allocation
// extension is in use (also filling the requires/prefers flags), otherwise
// via the core function with both flags reported as false.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12440 void VmaAllocator_T::GetImageMemoryRequirements(
12442 VkMemoryRequirements& memReq,
12443 bool& requiresDedicatedAllocation,
12444 bool& prefersDedicatedAllocation)
const 12446 #if VMA_DEDICATED_ALLOCATION 12447 if(m_UseKhrDedicatedAllocation)
12449 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12450 memReqInfo.image = hImage;
12452 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12454 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12455 memReq2.pNext = &memDedicatedReq;
12457 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12459 memReq = memReq2.memoryRequirements;
12460 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12461 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-Vulkan fallback path.
12464 #endif // #if VMA_DEDICATED_ALLOCATION 12466 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12467 requiresDedicatedAllocation =
false;
12468 prefersDedicatedAllocation =
false;
// Top-level allocation routine. Validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, and for default-pool
// allocations iterates candidate memory types (best first), retrying with the
// next type whenever AllocateMemoryOfType fails.
// NOTE(review): elided extract — braces and several lines are missing;
// leading numbers are original-file line numbers.
12472 VkResult VmaAllocator_T::AllocateMemory(
12473 const VkMemoryRequirements& vkMemReq,
12474 bool requiresDedicatedAllocation,
12475 bool prefersDedicatedAllocation,
12476 VkBuffer dedicatedBuffer,
12477 VkImage dedicatedImage,
12479 VmaSuballocationType suballocType,
12480 size_t allocationCount,
// Start from a clean output array so failures never leave stale handles.
12483 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
12485 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Flag-combination sanity checks; each invalid combination is a hard error.
12490 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12491 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12496 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12497 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12499 if(requiresDedicatedAllocation)
12503 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12504 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12506 if(createInfo.
pool != VK_NULL_HANDLE)
12508 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12509 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12512 if((createInfo.
pool != VK_NULL_HANDLE) &&
12515 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12516 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: alignment is widened to the pool's memory-type minimum.
12519 if(createInfo.
pool != VK_NULL_HANDLE)
12521 const VkDeviceSize alignmentForPool = VMA_MAX(
12522 vkMemReq.alignment,
12523 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12524 return createInfo.
pool->m_BlockVector.Allocate(
12526 m_CurrentFrameIndex.load(),
// Default-pool path: pick the best memory type, then fall back through the
// remaining candidate bits on failure.
12537 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12538 uint32_t memTypeIndex = UINT32_MAX;
12540 if(res == VK_SUCCESS)
12542 VkDeviceSize alignmentForMemType = VMA_MAX(
12543 vkMemReq.alignment,
12544 GetMemoryTypeMinAlignment(memTypeIndex));
12546 res = AllocateMemoryOfType(
12548 alignmentForMemType,
12549 requiresDedicatedAllocation || prefersDedicatedAllocation,
12558 if(res == VK_SUCCESS)
// Exclude the failed type and try the next best candidate.
12568 memoryTypeBits &= ~(1u << memTypeIndex);
12571 if(res == VK_SUCCESS)
12573 alignmentForMemType = VMA_MAX(
12574 vkMemReq.alignment,
12575 GetMemoryTypeMinAlignment(memTypeIndex));
12577 res = AllocateMemoryOfType(
12579 alignmentForMemType,
12580 requiresDedicatedAllocation || prefersDedicatedAllocation,
12589 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
12599 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order. Touching the allocation
// first keeps lost-allocation bookkeeping consistent; block allocations are
// returned to their owning block vector (custom pool or default), dedicated
// allocations go through FreeDedicatedMemory. Finally the wrapper object
// itself is destroyed.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12610 void VmaAllocator_T::FreeMemory(
12611 size_t allocationCount,
12614 VMA_ASSERT(pAllocations);
12616 for(
size_t allocIndex = allocationCount; allocIndex--; )
// Null handles in the array are silently skipped.
12620 if(allocation != VK_NULL_HANDLE)
12622 if(TouchAllocation(allocation))
// Overwrite freed memory with the debug pattern when enabled.
12624 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12626 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12629 switch(allocation->GetType())
12631 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12633 VmaBlockVector* pBlockVector = VMA_NULL;
12634 VmaPool hPool = allocation->GetPool();
12635 if(hPool != VK_NULL_HANDLE)
12637 pBlockVector = &hPool->m_BlockVector;
12641 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12642 pBlockVector = m_pBlockVectors[memTypeIndex];
12644 pBlockVector->Free(allocation);
12647 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12648 FreeDedicatedMemory(allocation);
// Destroy the VmaAllocation_T wrapper regardless of its type.
12655 allocation->SetUserData(
this, VMA_NULL);
12656 vma_delete(
this, allocation);
// Aggregates statistics from all sources — default block vectors, custom
// pools, and dedicated allocations — into pStats, then post-processes the
// totals per memory type and per heap.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12661 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset every StatInfo slot before accumulating.
12664 InitStatInfo(pStats->
total);
12665 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12667 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default-pool block vectors.
12671 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12673 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12674 VMA_ASSERT(pBlockVector);
12675 pBlockVector->AddStats(pStats);
// Custom pools, under the pools mutex.
12680 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12681 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12683 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own mutex.
12688 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12690 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12691 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12692 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12693 VMA_ASSERT(pDedicatedAllocVector);
12694 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12697 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12698 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12699 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12700 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/min/max for every accumulated StatInfo.
12705 VmaPostprocessCalcStatInfo(pStats->
total);
12706 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12707 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12708 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12709 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002). Presumably used for vendor-specific
// behavior elsewhere in the file — usage is not visible in this chunk.
12712 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given allocations. Eligible allocations (block-type,
// HOST_VISIBLE|HOST_COHERENT memory, not lost) are registered with a
// per-block-vector defragmentator; then every host-visible default block
// vector and every custom pool is defragmented within the byte/move budget,
// and all defragmentators are destroyed afterwards.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12714 VkResult VmaAllocator_T::Defragment(
12716 size_t allocationCount,
12717 VkBool32* pAllocationsChanged,
// Both output parameters are optional and zero-initialized when provided.
12721 if(pAllocationsChanged != VMA_NULL)
12723 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
12725 if(pDefragmentationStats != VMA_NULL)
12727 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12730 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12732 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12734 const size_t poolCount = m_Pools.size();
// Phase 1: register each eligible allocation with a defragmentator.
12737 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12740 VMA_ASSERT(hAlloc);
12741 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
12743 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12744 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12746 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12748 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12750 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12752 const VmaPool hAllocPool = hAlloc->GetPool();
12754 if(hAllocPool != VK_NULL_HANDLE)
// Only pools using the default (generic) algorithm can be defragmented.
12757 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12759 pAllocBlockVector = &hAllocPool->m_BlockVector;
12765 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12768 if(pAllocBlockVector != VMA_NULL)
12770 VmaDefragmentator*
const pDefragmentator =
12771 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12772 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12773 &pAllocationsChanged[allocIndex] : VMA_NULL;
12774 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation within the user-provided limits.
12779 VkResult result = VK_SUCCESS;
// NOTE(review): SIZE_MAX is a size_t maximum used as a VkDeviceSize (64-bit)
// default budget — on 32-bit builds this caps the budget at 4 GiB; confirm
// whether UINT64_MAX/WHOLE_SIZE was intended.
12783 VkDeviceSize maxBytesToMove = SIZE_MAX;
12784 uint32_t maxAllocationsToMove = UINT32_MAX;
12785 if(pDefragmentationInfo != VMA_NULL)
12792 for(uint32_t memTypeIndex = 0;
12793 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12797 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12799 result = m_pBlockVectors[memTypeIndex]->Defragment(
12800 pDefragmentationStats,
12802 maxAllocationsToMove);
12807 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12809 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12810 pDefragmentationStats,
12812 maxAllocationsToMove);
// Phase 3: tear down all defragmentators (reverse order).
12818 for(
size_t poolIndex = poolCount; poolIndex--; )
12820 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12824 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12826 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12828 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of GetAllocationInfo (the signature line is elided from this extract).
// Fills a VmaAllocationInfo. For allocations that can become lost it uses a
// compare-exchange loop on the last-use frame index so the allocation is
// atomically "touched" while its parameters are read; lost allocations
// report null memory/offset. The non-lost path simply copies the fields.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12837 if(hAllocation->CanBecomeLost())
12843 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12844 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: report size/userData but no usable memory or offset.
12847 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12851 pAllocationInfo->
offset = 0;
12852 pAllocationInfo->
size = hAllocation->GetSize();
12854 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: parameters are stable, copy them out.
12857 else if(localLastUseFrameIndex == localCurrFrameIndex)
12859 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12860 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12861 pAllocationInfo->
offset = hAllocation->GetOffset();
12862 pAllocationInfo->
size = hAllocation->GetSize();
12864 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index and retry the loop.
12869 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12871 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocation: in stats builds, still touch the frame index.
12878 #if VMA_STATS_STRING_ENABLED 12879 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12880 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12883 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12884 if(localLastUseFrameIndex == localCurrFrameIndex)
12890 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12892 localLastUseFrameIndex = localCurrFrameIndex;
12898 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12899 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12900 pAllocationInfo->
offset = hAllocation->GetOffset();
12901 pAllocationInfo->
size = hAllocation->GetSize();
12902 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12903 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks hAllocation as used in the current frame. Returns false only when
// the allocation can become lost and has already been lost; otherwise
// advances its last-use frame index via a compare-exchange loop and returns
// true (implied by the elided tail of each branch).
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12907 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12910 if(hAllocation->CanBecomeLost())
12912 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12913 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost — cannot be touched.
12916 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12920 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Race with another thread: retry until our frame index sticks.
12926 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12928 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocation: in stats builds, still record the touch.
12935 #if VMA_STATS_STRING_ENABLED 12936 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12937 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12940 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12941 if(localLastUseFrameIndex == localCurrFrameIndex)
12947 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12949 localLastUseFrameIndex = localCurrFrameIndex;
// Body of CreatePool (the signature line is elided from this extract).
// Creates a custom VmaPool: validates the create info (elided), computes the
// preferred block size for the pool's memory type, allocates the pool object,
// pre-creates its minimum number of blocks, and registers the pool in the
// sorted m_Pools vector under the pools mutex.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12961 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// (Elided) validation of the create info happens above this early-out.
12971 return VK_ERROR_INITIALIZATION_FAILED;
12974 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12976 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
12978 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12979 if(res != VK_SUCCESS)
// Minimum-block creation failed: destroy the half-built pool.
12981 vma_delete(
this, *pPool);
// Success: assign an ID and publish the pool under the mutex.
12988 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12989 (*pPool)->SetId(m_NextPoolId++);
12990 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: unregisters it from m_Pools under the mutex
// (asserting it was actually registered), then deletes the pool object.
// The trailing GetPoolStats line belongs to the next function, whose
// signature is elided from this extract.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
12996 void VmaAllocator_T::DestroyPool(
VmaPool pool)
13000 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13001 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
13002 VMA_ASSERT(success &&
"Pool not found in Allocator.");
13005 vma_delete(
this, pool);
// Body of GetPoolStats (signature elided): forwards to the block vector.
13010 pool->m_BlockVector.GetPoolStats(pPoolStats);
13013 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
13015 m_CurrentFrameIndex.store(frameIndex);
13018 void VmaAllocator_T::MakePoolAllocationsLost(
13020 size_t* pLostAllocationCount)
13022 hPool->m_BlockVector.MakePoolAllocationsLost(
13023 m_CurrentFrameIndex.load(),
13024 pLostAllocationCount);
13027 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
13029 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption in every default block vector and custom pool whose
// memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS as soon as any
// vector actually supports the check (other results handled in elided cases).
// NOTE(review): elided extract — braces and switch cases are missing;
// leading numbers are original-file line numbers.
13032 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
13034 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default-pool block vectors.
13037 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13039 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
13041 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
13042 VMA_ASSERT(pBlockVector);
13043 VkResult localRes = pBlockVector->CheckCorruption();
13046 case VK_ERROR_FEATURE_NOT_PRESENT:
13049 finalRes = VK_SUCCESS;
// Custom pools, under the pools mutex.
13059 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13060 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
13062 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
13064 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
13067 case VK_ERROR_FEATURE_NOT_PRESENT:
13070 finalRes = VK_SUCCESS;
13082 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
13084 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
13085 (*pAllocation)->InitLost();
// Allocates VkDeviceMemory through the imported vkAllocateMemory pointer,
// honoring an optional per-heap size limit: when a limit is set, the
// remaining budget is checked and decremented under the limit mutex, and
// the allocation is refused with OUT_OF_DEVICE_MEMORY if it would exceed it.
// On success, notifies the user's pfnAllocate callback.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13088 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
13090 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
13093 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13095 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13096 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
13098 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13099 if(res == VK_SUCCESS)
// Charge the allocation against the remaining heap budget.
13101 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: simulate device-memory exhaustion.
13106 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit configured for this heap: allocate directly.
13111 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13114 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
13116 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
13122 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
13124 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
13126 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
13129 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
13131 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
13132 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13134 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13135 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation's memory for host access. Allocations that can become
// lost are not mappable. Block allocations map the owning block (reference
// counted) and offset the returned pointer; dedicated allocations map their
// own memory. Returns VK_ERROR_MEMORY_MAP_FAILED for unsupported cases.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13139 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
13141 if(hAllocation->CanBecomeLost())
13143 return VK_ERROR_MEMORY_MAP_FAILED;
13146 switch(hAllocation->GetType())
13148 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13150 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13151 char *pBytes = VMA_NULL;
13152 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
13153 if(res == VK_SUCCESS)
// Adjust the block-base pointer by the suballocation's offset.
13155 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
13156 hAllocation->BlockAllocMap();
13160 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13161 return hAllocation->DedicatedAllocMap(
this, ppData);
13164 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of Unmap (the signature line is elided from this extract): reverses
// Map, decrementing the allocation's map count and unmapping the owning
// block or the dedicated memory respectively.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13170 switch(hAllocation->GetType())
13172 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13174 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13175 hAllocation->BlockAllocUnmap();
13176 pBlock->Unmap(
this, 1);
13179 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13180 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the allocation's memory: dedicated allocations call
// vkBindBufferMemory directly; block allocations delegate to the owning
// block, which accounts for the suballocation offset.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13187 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13189 VkResult res = VK_SUCCESS;
13190 switch(hAllocation->GetType())
13192 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13193 res = GetVulkanFunctions().vkBindBufferMemory(
13196 hAllocation->GetMemory(),
13199 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13201 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13202 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13203 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: binds hImage either directly (for
// dedicated allocations) or through the owning device-memory block.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13212 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13214 VkResult res = VK_SUCCESS;
13215 switch(hAllocation->GetType())
13217 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13218 res = GetVulkanFunctions().vkBindImageMemory(
13221 hAllocation->GetMemory(),
13224 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13226 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13227 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13228 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent memory
// types. The range is expanded to nonCoherentAtomSize alignment (required by
// vkFlush/InvalidateMappedMemoryRanges) and clamped to the allocation/block
// size; for block allocations the suballocation offset is added afterwards.
// No-op for coherent memory or size == 0.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13237 void VmaAllocator_T::FlushOrInvalidateAllocation(
13239 VkDeviceSize offset, VkDeviceSize size,
13240 VMA_CACHE_OPERATION op)
13242 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13243 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13245 const VkDeviceSize allocationSize = hAllocation->GetSize();
13246 VMA_ASSERT(offset <= allocationSize);
13248 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13250 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13251 memRange.memory = hAllocation->GetMemory();
13253 switch(hAllocation->GetType())
// Dedicated allocation: align down the offset, align up and clamp the size.
13255 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13256 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13257 if(size == VK_WHOLE_SIZE)
13259 memRange.size = allocationSize - memRange.offset;
13263 VMA_ASSERT(offset + size <= allocationSize);
13264 memRange.size = VMA_MIN(
13265 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13266 allocationSize - memRange.offset);
// Block allocation: same alignment, then translate by the suballocation
// offset and clamp against the whole block size.
13270 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13273 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13274 if(size == VK_WHOLE_SIZE)
13276 size = allocationSize - offset;
13280 VMA_ASSERT(offset + size <= allocationSize);
13282 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13285 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13286 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13287 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13288 memRange.offset += allocationOffset;
13289 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13300 case VMA_CACHE_FLUSH:
13301 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13303 case VMA_CACHE_INVALIDATE:
13304 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the sorted per-memory-type
// dedicated-allocations vector under its mutex, then releases the underlying
// VkDeviceMemory via FreeVulkanMemory.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13313 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13315 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13317 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13319 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13320 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13321 VMA_ASSERT(pDedicatedAllocations);
13322 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13323 VMA_ASSERT(success);
13326 VkDeviceMemory hMemory = allocation->GetMemory();
13338 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13340 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with the given byte pattern.
// Only active when VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, the
// allocation cannot become lost, and the memory type is HOST_VISIBLE.
// Maps, memsets, flushes (for non-coherent memory), then unmaps; asserts if
// mapping fails.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13343 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13345 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13346 !hAllocation->CanBecomeLost() &&
13347 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13349 void* pData = VMA_NULL;
13350 VkResult res = Map(hAllocation, &pData);
13351 if(res == VK_SUCCESS)
13353 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible on non-coherent memory.
13354 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13355 Unmap(hAllocation);
13359 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the detailed JSON map (stats-string builds only): three sections —
// "DedicatedAllocations" and "DefaultPools" keyed by memory type, and "Pools"
// keyed by pool ID. Section headers are emitted lazily so empty sections are
// omitted entirely.
// NOTE(review): elided extract — braces/lines are missing; leading numbers
// are original-file line numbers.
13364 #if VMA_STATS_STRING_ENABLED 13366 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13368 bool dedicatedAllocationsStarted =
false;
13369 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13371 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13372 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13373 VMA_ASSERT(pDedicatedAllocVector);
13374 if(pDedicatedAllocVector->empty() ==
false)
// Open the section only when the first non-empty vector is found.
13376 if(dedicatedAllocationsStarted ==
false)
13378 dedicatedAllocationsStarted =
true;
13379 json.WriteString(
"DedicatedAllocations");
13380 json.BeginObject();
13383 json.BeginString(
"Type ");
13384 json.ContinueString(memTypeIndex);
13389 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13391 json.BeginObject(
true);
13393 hAlloc->PrintParameters(json);
13400 if(dedicatedAllocationsStarted)
// Default-pool block vectors, again with a lazily-opened section.
13406 bool allocationsStarted =
false;
13407 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13409 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13411 if(allocationsStarted ==
false)
13413 allocationsStarted =
true;
13414 json.WriteString(
"DefaultPools");
13415 json.BeginObject();
13418 json.BeginString(
"Type ");
13419 json.ContinueString(memTypeIndex);
13422 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13425 if(allocationsStarted)
// Custom pools, keyed by pool ID, under the pools mutex.
13433 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13434 const size_t poolCount = m_Pools.size();
13437 json.WriteString(
"Pools");
13438 json.BeginObject();
13439 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13441 json.BeginString();
13442 json.ContinueString(m_Pools[poolIndex]->GetId());
13445 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
13452 #endif // #if VMA_STATS_STRING_ENABLED 13461 VMA_ASSERT(pCreateInfo && pAllocator);
13462 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13464 return (*pAllocator)->Init(pCreateInfo);
13470 if(allocator != VK_NULL_HANDLE)
13472 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13473 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13474 vma_delete(&allocationCallbacks, allocator);
13480 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13482 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13483 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13488 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13490 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13491 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13496 uint32_t memoryTypeIndex,
13497 VkMemoryPropertyFlags* pFlags)
13499 VMA_ASSERT(allocator && pFlags);
13500 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13501 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13506 uint32_t frameIndex)
13508 VMA_ASSERT(allocator);
13509 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13511 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13513 allocator->SetCurrentFrameIndex(frameIndex);
13520 VMA_ASSERT(allocator && pStats);
13521 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13522 allocator->CalculateStats(pStats);
13525 #if VMA_STATS_STRING_ENABLED 13529 char** ppStatsString,
13530 VkBool32 detailedMap)
13532 VMA_ASSERT(allocator && ppStatsString);
13533 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13535 VmaStringBuilder sb(allocator);
13537 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13538 json.BeginObject();
13541 allocator->CalculateStats(&stats);
13543 json.WriteString(
"Total");
13544 VmaPrintStatInfo(json, stats.
total);
13546 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13548 json.BeginString(
"Heap ");
13549 json.ContinueString(heapIndex);
13551 json.BeginObject();
13553 json.WriteString(
"Size");
13554 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13556 json.WriteString(
"Flags");
13557 json.BeginArray(
true);
13558 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13560 json.WriteString(
"DEVICE_LOCAL");
13566 json.WriteString(
"Stats");
13567 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
13570 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13572 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13574 json.BeginString(
"Type ");
13575 json.ContinueString(typeIndex);
13578 json.BeginObject();
13580 json.WriteString(
"Flags");
13581 json.BeginArray(
true);
13582 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13583 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13585 json.WriteString(
"DEVICE_LOCAL");
13587 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13589 json.WriteString(
"HOST_VISIBLE");
13591 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13593 json.WriteString(
"HOST_COHERENT");
13595 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13597 json.WriteString(
"HOST_CACHED");
13599 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13601 json.WriteString(
"LAZILY_ALLOCATED");
13607 json.WriteString(
"Stats");
13608 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13617 if(detailedMap == VK_TRUE)
13619 allocator->PrintDetailedMap(json);
13625 const size_t len = sb.GetLength();
13626 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13629 memcpy(pChars, sb.GetData(), len);
13631 pChars[len] =
'\0';
13632 *ppStatsString = pChars;
13637 char* pStatsString)
13639 if(pStatsString != VMA_NULL)
13641 VMA_ASSERT(allocator);
13642 size_t len = strlen(pStatsString);
13643 vma_delete_array(allocator, pStatsString, len + 1);
13647 #endif // #if VMA_STATS_STRING_ENABLED 13654 uint32_t memoryTypeBits,
13656 uint32_t* pMemoryTypeIndex)
13658 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13659 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13660 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13667 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13668 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13673 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13677 switch(pAllocationCreateInfo->
usage)
13682 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13684 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13688 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13691 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13692 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13694 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13698 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13699 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13705 *pMemoryTypeIndex = UINT32_MAX;
13706 uint32_t minCost = UINT32_MAX;
13707 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13708 memTypeIndex < allocator->GetMemoryTypeCount();
13709 ++memTypeIndex, memTypeBit <<= 1)
13712 if((memTypeBit & memoryTypeBits) != 0)
13714 const VkMemoryPropertyFlags currFlags =
13715 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13717 if((requiredFlags & ~currFlags) == 0)
13720 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13722 if(currCost < minCost)
13724 *pMemoryTypeIndex = memTypeIndex;
13729 minCost = currCost;
13734 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13739 const VkBufferCreateInfo* pBufferCreateInfo,
13741 uint32_t* pMemoryTypeIndex)
13743 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13744 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13745 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13746 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13748 const VkDevice hDev = allocator->m_hDevice;
13749 VkBuffer hBuffer = VK_NULL_HANDLE;
13750 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13751 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13752 if(res == VK_SUCCESS)
13754 VkMemoryRequirements memReq = {};
13755 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13756 hDev, hBuffer, &memReq);
13760 memReq.memoryTypeBits,
13761 pAllocationCreateInfo,
13764 allocator->GetVulkanFunctions().vkDestroyBuffer(
13765 hDev, hBuffer, allocator->GetAllocationCallbacks());
13772 const VkImageCreateInfo* pImageCreateInfo,
13774 uint32_t* pMemoryTypeIndex)
13776 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13777 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13778 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13779 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13781 const VkDevice hDev = allocator->m_hDevice;
13782 VkImage hImage = VK_NULL_HANDLE;
13783 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13784 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13785 if(res == VK_SUCCESS)
13787 VkMemoryRequirements memReq = {};
13788 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13789 hDev, hImage, &memReq);
13793 memReq.memoryTypeBits,
13794 pAllocationCreateInfo,
13797 allocator->GetVulkanFunctions().vkDestroyImage(
13798 hDev, hImage, allocator->GetAllocationCallbacks());
13808 VMA_ASSERT(allocator && pCreateInfo && pPool);
13810 VMA_DEBUG_LOG(
"vmaCreatePool");
13812 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13814 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13816 #if VMA_RECORDING_ENABLED 13817 if(allocator->GetRecorder() != VMA_NULL)
13819 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13830 VMA_ASSERT(allocator);
13832 if(pool == VK_NULL_HANDLE)
13837 VMA_DEBUG_LOG(
"vmaDestroyPool");
13839 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13841 #if VMA_RECORDING_ENABLED 13842 if(allocator->GetRecorder() != VMA_NULL)
13844 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13848 allocator->DestroyPool(pool);
13856 VMA_ASSERT(allocator && pool && pPoolStats);
13858 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13860 allocator->GetPoolStats(pool, pPoolStats);
13866 size_t* pLostAllocationCount)
13868 VMA_ASSERT(allocator && pool);
13870 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13872 #if VMA_RECORDING_ENABLED 13873 if(allocator->GetRecorder() != VMA_NULL)
13875 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13879 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13884 VMA_ASSERT(allocator && pool);
13886 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13888 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13890 return allocator->CheckPoolCorruption(pool);
13895 const VkMemoryRequirements* pVkMemoryRequirements,
13900 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13902 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13904 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13906 VkResult result = allocator->AllocateMemory(
13907 *pVkMemoryRequirements,
13913 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13917 #if VMA_RECORDING_ENABLED 13918 if(allocator->GetRecorder() != VMA_NULL)
13920 allocator->GetRecorder()->RecordAllocateMemory(
13921 allocator->GetCurrentFrameIndex(),
13922 *pVkMemoryRequirements,
13928 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13930 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13938 const VkMemoryRequirements* pVkMemoryRequirements,
13940 size_t allocationCount,
13944 if(allocationCount == 0)
13949 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
13951 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
13953 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13955 VkResult result = allocator->AllocateMemory(
13956 *pVkMemoryRequirements,
13962 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13966 #if VMA_RECORDING_ENABLED 13967 if(allocator->GetRecorder() != VMA_NULL)
13980 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13982 for(
size_t i = 0; i < allocationCount; ++i)
13984 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
13998 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
14000 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
14002 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14004 VkMemoryRequirements vkMemReq = {};
14005 bool requiresDedicatedAllocation =
false;
14006 bool prefersDedicatedAllocation =
false;
14007 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
14008 requiresDedicatedAllocation,
14009 prefersDedicatedAllocation);
14011 VkResult result = allocator->AllocateMemory(
14013 requiresDedicatedAllocation,
14014 prefersDedicatedAllocation,
14018 VMA_SUBALLOCATION_TYPE_BUFFER,
14022 #if VMA_RECORDING_ENABLED 14023 if(allocator->GetRecorder() != VMA_NULL)
14025 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
14026 allocator->GetCurrentFrameIndex(),
14028 requiresDedicatedAllocation,
14029 prefersDedicatedAllocation,
14035 if(pAllocationInfo && result == VK_SUCCESS)
14037 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14050 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
14052 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
14054 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14056 VkMemoryRequirements vkMemReq = {};
14057 bool requiresDedicatedAllocation =
false;
14058 bool prefersDedicatedAllocation =
false;
14059 allocator->GetImageMemoryRequirements(image, vkMemReq,
14060 requiresDedicatedAllocation, prefersDedicatedAllocation);
14062 VkResult result = allocator->AllocateMemory(
14064 requiresDedicatedAllocation,
14065 prefersDedicatedAllocation,
14069 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
14073 #if VMA_RECORDING_ENABLED 14074 if(allocator->GetRecorder() != VMA_NULL)
14076 allocator->GetRecorder()->RecordAllocateMemoryForImage(
14077 allocator->GetCurrentFrameIndex(),
14079 requiresDedicatedAllocation,
14080 prefersDedicatedAllocation,
14086 if(pAllocationInfo && result == VK_SUCCESS)
14088 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14098 VMA_ASSERT(allocator);
14100 if(allocation == VK_NULL_HANDLE)
14105 VMA_DEBUG_LOG(
"vmaFreeMemory");
14107 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14109 #if VMA_RECORDING_ENABLED 14110 if(allocator->GetRecorder() != VMA_NULL)
14112 allocator->GetRecorder()->RecordFreeMemory(
14113 allocator->GetCurrentFrameIndex(),
14118 allocator->FreeMemory(
14125 size_t allocationCount,
14128 if(allocationCount == 0)
14133 VMA_ASSERT(allocator);
14135 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
14137 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14139 #if VMA_RECORDING_ENABLED 14151 allocator->FreeMemory(allocationCount, pAllocations);
14159 VMA_ASSERT(allocator && allocation && pAllocationInfo);
14161 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14163 #if VMA_RECORDING_ENABLED 14164 if(allocator->GetRecorder() != VMA_NULL)
14166 allocator->GetRecorder()->RecordGetAllocationInfo(
14167 allocator->GetCurrentFrameIndex(),
14172 allocator->GetAllocationInfo(allocation, pAllocationInfo);
14179 VMA_ASSERT(allocator && allocation);
14181 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14183 #if VMA_RECORDING_ENABLED 14184 if(allocator->GetRecorder() != VMA_NULL)
14186 allocator->GetRecorder()->RecordTouchAllocation(
14187 allocator->GetCurrentFrameIndex(),
14192 return allocator->TouchAllocation(allocation);
14200 VMA_ASSERT(allocator && allocation);
14202 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14204 allocation->SetUserData(allocator, pUserData);
14206 #if VMA_RECORDING_ENABLED 14207 if(allocator->GetRecorder() != VMA_NULL)
14209 allocator->GetRecorder()->RecordSetAllocationUserData(
14210 allocator->GetCurrentFrameIndex(),
14221 VMA_ASSERT(allocator && pAllocation);
14223 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
14225 allocator->CreateLostAllocation(pAllocation);
14227 #if VMA_RECORDING_ENABLED 14228 if(allocator->GetRecorder() != VMA_NULL)
14230 allocator->GetRecorder()->RecordCreateLostAllocation(
14231 allocator->GetCurrentFrameIndex(),
14242 VMA_ASSERT(allocator && allocation && ppData);
14244 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14246 VkResult res = allocator->Map(allocation, ppData);
14248 #if VMA_RECORDING_ENABLED 14249 if(allocator->GetRecorder() != VMA_NULL)
14251 allocator->GetRecorder()->RecordMapMemory(
14252 allocator->GetCurrentFrameIndex(),
14264 VMA_ASSERT(allocator && allocation);
14266 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14268 #if VMA_RECORDING_ENABLED 14269 if(allocator->GetRecorder() != VMA_NULL)
14271 allocator->GetRecorder()->RecordUnmapMemory(
14272 allocator->GetCurrentFrameIndex(),
14277 allocator->Unmap(allocation);
14282 VMA_ASSERT(allocator && allocation);
14284 VMA_DEBUG_LOG(
"vmaFlushAllocation");
14286 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14288 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14290 #if VMA_RECORDING_ENABLED 14291 if(allocator->GetRecorder() != VMA_NULL)
14293 allocator->GetRecorder()->RecordFlushAllocation(
14294 allocator->GetCurrentFrameIndex(),
14295 allocation, offset, size);
14302 VMA_ASSERT(allocator && allocation);
14304 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14306 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14308 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14310 #if VMA_RECORDING_ENABLED 14311 if(allocator->GetRecorder() != VMA_NULL)
14313 allocator->GetRecorder()->RecordInvalidateAllocation(
14314 allocator->GetCurrentFrameIndex(),
14315 allocation, offset, size);
14322 VMA_ASSERT(allocator);
14324 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14326 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14328 return allocator->CheckCorruption(memoryTypeBits);
14334 size_t allocationCount,
14335 VkBool32* pAllocationsChanged,
14339 VMA_ASSERT(allocator && pAllocations);
14341 VMA_DEBUG_LOG(
"vmaDefragment");
14343 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14345 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14353 VMA_ASSERT(allocator && allocation && buffer);
14355 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14357 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14359 return allocator->BindBufferMemory(allocation, buffer);
14367 VMA_ASSERT(allocator && allocation && image);
14369 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14371 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14373 return allocator->BindImageMemory(allocation, image);
14378 const VkBufferCreateInfo* pBufferCreateInfo,
14384 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14386 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14388 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14390 *pBuffer = VK_NULL_HANDLE;
14391 *pAllocation = VK_NULL_HANDLE;
14394 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14395 allocator->m_hDevice,
14397 allocator->GetAllocationCallbacks(),
14402 VkMemoryRequirements vkMemReq = {};
14403 bool requiresDedicatedAllocation =
false;
14404 bool prefersDedicatedAllocation =
false;
14405 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14406 requiresDedicatedAllocation, prefersDedicatedAllocation);
14410 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14412 VMA_ASSERT(vkMemReq.alignment %
14413 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14415 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14417 VMA_ASSERT(vkMemReq.alignment %
14418 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14420 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14422 VMA_ASSERT(vkMemReq.alignment %
14423 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14427 res = allocator->AllocateMemory(
14429 requiresDedicatedAllocation,
14430 prefersDedicatedAllocation,
14433 *pAllocationCreateInfo,
14434 VMA_SUBALLOCATION_TYPE_BUFFER,
14438 #if VMA_RECORDING_ENABLED 14439 if(allocator->GetRecorder() != VMA_NULL)
14441 allocator->GetRecorder()->RecordCreateBuffer(
14442 allocator->GetCurrentFrameIndex(),
14443 *pBufferCreateInfo,
14444 *pAllocationCreateInfo,
14452 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14456 #if VMA_STATS_STRING_ENABLED 14457 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14459 if(pAllocationInfo != VMA_NULL)
14461 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14466 allocator->FreeMemory(
14469 *pAllocation = VK_NULL_HANDLE;
14470 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14471 *pBuffer = VK_NULL_HANDLE;
14474 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14475 *pBuffer = VK_NULL_HANDLE;
14486 VMA_ASSERT(allocator);
14488 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14493 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14495 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14497 #if VMA_RECORDING_ENABLED 14498 if(allocator->GetRecorder() != VMA_NULL)
14500 allocator->GetRecorder()->RecordDestroyBuffer(
14501 allocator->GetCurrentFrameIndex(),
14506 if(buffer != VK_NULL_HANDLE)
14508 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14511 if(allocation != VK_NULL_HANDLE)
14513 allocator->FreeMemory(
14521 const VkImageCreateInfo* pImageCreateInfo,
14527 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14529 VMA_DEBUG_LOG(
"vmaCreateImage");
14531 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14533 *pImage = VK_NULL_HANDLE;
14534 *pAllocation = VK_NULL_HANDLE;
14537 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14538 allocator->m_hDevice,
14540 allocator->GetAllocationCallbacks(),
14544 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14545 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14546 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14549 VkMemoryRequirements vkMemReq = {};
14550 bool requiresDedicatedAllocation =
false;
14551 bool prefersDedicatedAllocation =
false;
14552 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14553 requiresDedicatedAllocation, prefersDedicatedAllocation);
14555 res = allocator->AllocateMemory(
14557 requiresDedicatedAllocation,
14558 prefersDedicatedAllocation,
14561 *pAllocationCreateInfo,
14566 #if VMA_RECORDING_ENABLED 14567 if(allocator->GetRecorder() != VMA_NULL)
14569 allocator->GetRecorder()->RecordCreateImage(
14570 allocator->GetCurrentFrameIndex(),
14572 *pAllocationCreateInfo,
14580 res = allocator->BindImageMemory(*pAllocation, *pImage);
14584 #if VMA_STATS_STRING_ENABLED 14585 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14587 if(pAllocationInfo != VMA_NULL)
14589 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14594 allocator->FreeMemory(
14597 *pAllocation = VK_NULL_HANDLE;
14598 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14599 *pImage = VK_NULL_HANDLE;
14602 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14603 *pImage = VK_NULL_HANDLE;
14614 VMA_ASSERT(allocator);
14616 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14621 VMA_DEBUG_LOG(
"vmaDestroyImage");
14623 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14625 #if VMA_RECORDING_ENABLED 14626 if(allocator->GetRecorder() != VMA_NULL)
14628 allocator->GetRecorder()->RecordDestroyImage(
14629 allocator->GetCurrentFrameIndex(),
14634 if(image != VK_NULL_HANDLE)
14636 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14638 if(allocation != VK_NULL_HANDLE)
14640 allocator->FreeMemory(
14646 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1567
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1868
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1624
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1598
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2190
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1579
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1825
Definition: vk_mem_alloc.h:1928
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1571
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2290
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1621
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2586
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2079
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1468
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2171
Definition: vk_mem_alloc.h:1905
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1560
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1978
Definition: vk_mem_alloc.h:1852
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1633
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2107
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1686
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1618
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1856
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1758
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1576
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1757
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2590
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1650
VmaStatInfo total
Definition: vk_mem_alloc.h:1767
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2598
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1962
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2581
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1577
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1502
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1627
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2121
Definition: vk_mem_alloc.h:2115
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1693
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2300
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1572
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1596
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1999
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2141
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2177
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1558
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2124
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1803
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2576
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2594
Definition: vk_mem_alloc.h:1842
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1986
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1575
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1763
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1508
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:1946
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1529
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1600
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1534
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2596
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1973
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2187
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1568
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1746
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2136
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1521
Definition: vk_mem_alloc.h:2111
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1912
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1759
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1525
Definition: vk_mem_alloc.h:1936
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2127
Definition: vk_mem_alloc.h:1851
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1574
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1968
Definition: vk_mem_alloc.h:1959
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1749
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1570
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2149
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1636
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2180
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1957
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1992
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
Definition: vk_mem_alloc.h:1674
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1765
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1892
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1758
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1581
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1606
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1523
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1580
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2163
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1573
Definition: vk_mem_alloc.h:1923
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1614
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vmaSetAllocationUserData().
Definition: vk_mem_alloc.h:2314
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1630
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1758
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1755
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and custom pools) in search for corruptions.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2168
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:1932
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2295
Definition: vk_mem_alloc.h:1943
Definition: vk_mem_alloc.h:1955
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2592
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1566
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1753
Definition: vk_mem_alloc.h:1808
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2117
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSettings.
Definition: vk_mem_alloc.h:1603
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1751
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1578
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1582
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such blocks.
Definition: vk_mem_alloc.h:1879
Definition: vk_mem_alloc.h:1950
Definition: vk_mem_alloc.h:1835
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2309
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1556
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1569
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2096
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2276
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1940
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2061
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1759
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1918
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1590
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1766
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2174
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1759
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2281