23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1483 #ifndef VMA_RECORDING_ENABLED 1485 #define VMA_RECORDING_ENABLED 1 1487 #define VMA_RECORDING_ENABLED 0 1492 #define NOMINMAX // For windows.h 1495 #include <vulkan/vulkan.h> 1497 #if VMA_RECORDING_ENABLED 1498 #include <windows.h> 1501 #if !defined(VMA_DEDICATED_ALLOCATION) 1502 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1503 #define VMA_DEDICATED_ALLOCATION 1 1505 #define VMA_DEDICATED_ALLOCATION 0 1523 uint32_t memoryType,
1524 VkDeviceMemory memory,
1529 uint32_t memoryType,
1530 VkDeviceMemory memory,
1602 #if VMA_DEDICATED_ALLOCATION 1603 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1604 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1730 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1738 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1748 uint32_t memoryTypeIndex,
1749 VkMemoryPropertyFlags* pFlags);
1761 uint32_t frameIndex);
1794 #define VMA_STATS_STRING_ENABLED 1 1796 #if VMA_STATS_STRING_ENABLED 1803 char** ppStatsString,
1804 VkBool32 detailedMap);
1808 char* pStatsString);
1810 #endif // #if VMA_STATS_STRING_ENABLED 2039 uint32_t memoryTypeBits,
2041 uint32_t* pMemoryTypeIndex);
2057 const VkBufferCreateInfo* pBufferCreateInfo,
2059 uint32_t* pMemoryTypeIndex);
2075 const VkImageCreateInfo* pImageCreateInfo,
2077 uint32_t* pMemoryTypeIndex);
2249 size_t* pLostAllocationCount);
2348 const VkMemoryRequirements* pVkMemoryRequirements,
2402 VkDeviceSize newSize);
2635 size_t allocationCount,
2636 VkBool32* pAllocationsChanged,
2702 const VkBufferCreateInfo* pBufferCreateInfo,
2727 const VkImageCreateInfo* pImageCreateInfo,
2753 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2756 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2757 #define VMA_IMPLEMENTATION 2760 #ifdef VMA_IMPLEMENTATION 2761 #undef VMA_IMPLEMENTATION 2783 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2784 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2796 #if VMA_USE_STL_CONTAINERS 2797 #define VMA_USE_STL_VECTOR 1 2798 #define VMA_USE_STL_UNORDERED_MAP 1 2799 #define VMA_USE_STL_LIST 1 2802 #if VMA_USE_STL_VECTOR 2806 #if VMA_USE_STL_UNORDERED_MAP 2807 #include <unordered_map> 2810 #if VMA_USE_STL_LIST 2819 #include <algorithm> 2825 #define VMA_NULL nullptr 2828 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 2830 void *aligned_alloc(
size_t alignment,
size_t size)
2833 if(alignment <
sizeof(
void*))
2835 alignment =
sizeof(
void*);
2838 return memalign(alignment, size);
2840 #elif defined(__APPLE__) || defined(__ANDROID__) 2842 void *aligned_alloc(
size_t alignment,
size_t size)
2845 if(alignment <
sizeof(
void*))
2847 alignment =
sizeof(
void*);
2851 if(posix_memalign(&pointer, alignment, size) == 0)
2865 #define VMA_ASSERT(expr) assert(expr) 2867 #define VMA_ASSERT(expr) 2873 #ifndef VMA_HEAVY_ASSERT 2875 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2877 #define VMA_HEAVY_ASSERT(expr) 2881 #ifndef VMA_ALIGN_OF 2882 #define VMA_ALIGN_OF(type) (__alignof(type)) 2885 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2887 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2889 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2893 #ifndef VMA_SYSTEM_FREE 2895 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2897 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2902 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2906 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2910 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2914 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2917 #ifndef VMA_DEBUG_LOG 2918 #define VMA_DEBUG_LOG(format, ...) 2928 #if VMA_STATS_STRING_ENABLED 2929 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2931 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2933 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2935 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2937 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
2939 snprintf(outStr, strLen,
"%p", ptr);
2949 void Lock() { m_Mutex.lock(); }
2950 void Unlock() { m_Mutex.unlock(); }
2954 #define VMA_MUTEX VmaMutex 2965 #ifndef VMA_ATOMIC_UINT32 2966 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2969 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2974 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2977 #ifndef VMA_DEBUG_ALIGNMENT 2982 #define VMA_DEBUG_ALIGNMENT (1) 2985 #ifndef VMA_DEBUG_MARGIN 2990 #define VMA_DEBUG_MARGIN (0) 2993 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2998 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3001 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3007 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3010 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3015 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3018 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3023 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3026 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3027 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3031 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3032 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3036 #ifndef VMA_CLASS_NO_COPY 3037 #define VMA_CLASS_NO_COPY(className) \ 3039 className(const className&) = delete; \ 3040 className& operator=(const className&) = delete; 3043 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3046 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3048 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3049 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3055 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3056 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Population count: number of bits set in v, via the classic SWAR
// (SIMD-within-a-register) reduction — no loops, no table.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = v - ((v >> 1) & 0x55555555);          // 2-bit sums
    count = ((count >> 2) & 0x33333333) + (count & 0x33333333); // 4-bit sums
    count = ((count >> 4) + count) & 0x0F0F0F0F;           // 8-bit sums
    count = ((count >> 8) + count) & 0x00FF00FF;           // 16-bit sums
    count = ((count >> 16) + count) & 0x0000FFFF;          // final sum
    return count;
}
// Round val up to the nearest multiple of align.
// Integer-only; works for any positive align (power of two not required).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return ((val + align - 1) / align) * align;
}
// Round val down to the nearest multiple of align (integer truncation).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return (val / align) * align;
}
// Integer division with round-half-up: adds y/2 before dividing.
// Intended for non-negative integer operands.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// True when x is a power of two. NOTE: also returns true for x == 0,
// since 0 & (0-1) == 0 — callers must exclude zero themselves if needed.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x - 1)) == 0;
}
3103 static inline uint32_t VmaNextPow2(uint32_t v)
3114 static inline uint64_t VmaNextPow2(uint64_t v)
3128 static inline uint32_t VmaPrevPow2(uint32_t v)
3138 static inline uint64_t VmaPrevPow2(uint64_t v)
3150 static inline bool VmaStrIsEmpty(
const char* pStr)
3152 return pStr == VMA_NULL || *pStr ==
'\0';
3155 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition around the last element *(end-1) as pivot.
// After return, everything before the returned iterator compares less
// than the pivot (per cmp) and the pivot sits at the returned position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator dst = beg; // next slot for an element < pivot
    for(Iterator it = beg; it < pivot; ++it)
    {
        if(cmp(*it, *pivot))
        {
            if(dst != it)
            {
                VMA_SWAP(*it, *dst);
            }
            ++dst;
        }
    }
    // Move the pivot into its final sorted position.
    if(dst != pivot)
    {
        VMA_SWAP(*dst, *pivot);
    }
    return dst;
}
3196 template<
typename Iterator,
typename Compare>
3197 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3201 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3202 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3203 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3207 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3209 #endif // #ifndef VMA_SORT 3218 static inline bool VmaBlocksOnSamePage(
3219 VkDeviceSize resourceAOffset,
3220 VkDeviceSize resourceASize,
3221 VkDeviceSize resourceBOffset,
3222 VkDeviceSize pageSize)
3224 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3225 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3226 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3227 VkDeviceSize resourceBStart = resourceBOffset;
3228 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3229 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation within a memory block.
// Ordering matters: VmaIsBufferImageGranularityConflict relies on these
// numeric values to canonicalize its two arguments.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource of unknown kind
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // force 32-bit storage
};
3249 static inline bool VmaIsBufferImageGranularityConflict(
3250 VmaSuballocationType suballocType1,
3251 VmaSuballocationType suballocType2)
3253 if(suballocType1 > suballocType2)
3255 VMA_SWAP(suballocType1, suballocType2);
3258 switch(suballocType1)
3260 case VMA_SUBALLOCATION_TYPE_FREE:
3262 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3264 case VMA_SUBALLOCATION_TYPE_BUFFER:
3266 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3267 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3268 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3270 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3271 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3272 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3273 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3275 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3276 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3284 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3286 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3287 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3288 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3290 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3294 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3296 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3297 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3298 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3300 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3311 VMA_CLASS_NO_COPY(VmaMutexLock)
3313 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3314 m_pMutex(useMutex ? &mutex : VMA_NULL)
3331 VMA_MUTEX* m_pMutex;
3334 #if VMA_DEBUG_GLOBAL_MUTEX 3335 static VMA_MUTEX gDebugGlobalMutex;
3336 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3338 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3342 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the
// first element NOT less than key (i.e. std::lower_bound semantics),
// or end if every element is less.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg + mid), key))
        {
            down = mid + 1; // element < key: answer is to the right
        }
        else
        {
            up = mid;       // element >= key: answer is mid or left of it
        }
    }
    return beg + down;
}
3375 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3377 if((pAllocationCallbacks != VMA_NULL) &&
3378 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3380 return (*pAllocationCallbacks->pfnAllocation)(
3381 pAllocationCallbacks->pUserData,
3384 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3388 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3392 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3394 if((pAllocationCallbacks != VMA_NULL) &&
3395 (pAllocationCallbacks->pfnFree != VMA_NULL))
3397 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3401 VMA_SYSTEM_FREE(ptr);
3405 template<
typename T>
3406 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3408 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3411 template<
typename T>
3412 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3414 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3417 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3419 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3421 template<
typename T>
3422 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3425 VmaFree(pAllocationCallbacks, ptr);
3428 template<
typename T>
3429 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3433 for(
size_t i = count; i--; )
3437 VmaFree(pAllocationCallbacks, ptr);
3442 template<
typename T>
3443 class VmaStlAllocator
3446 const VkAllocationCallbacks*
const m_pCallbacks;
3447 typedef T value_type;
3449 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3450 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3452 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3453 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3455 template<
typename U>
3456 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3458 return m_pCallbacks == rhs.m_pCallbacks;
3460 template<
typename U>
3461 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3463 return m_pCallbacks != rhs.m_pCallbacks;
3466 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3469 #if VMA_USE_STL_VECTOR 3471 #define VmaVector std::vector 3473 template<
typename T,
typename allocatorT>
3474 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3476 vec.insert(vec.begin() + index, item);
3479 template<
typename T,
typename allocatorT>
3480 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3482 vec.erase(vec.begin() + index);
3485 #else // #if VMA_USE_STL_VECTOR 3490 template<
typename T,
typename AllocatorT>
3494 typedef T value_type;
3496 VmaVector(
const AllocatorT& allocator) :
3497 m_Allocator(allocator),
3504 VmaVector(
size_t count,
const AllocatorT& allocator) :
3505 m_Allocator(allocator),
3506 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3512 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3513 m_Allocator(src.m_Allocator),
3514 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3515 m_Count(src.m_Count),
3516 m_Capacity(src.m_Count)
3520 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3526 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3529 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3533 resize(rhs.m_Count);
3536 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3542 bool empty()
const {
return m_Count == 0; }
3543 size_t size()
const {
return m_Count; }
3544 T* data() {
return m_pArray; }
3545 const T* data()
const {
return m_pArray; }
3547 T& operator[](
size_t index)
3549 VMA_HEAVY_ASSERT(index < m_Count);
3550 return m_pArray[index];
3552 const T& operator[](
size_t index)
const 3554 VMA_HEAVY_ASSERT(index < m_Count);
3555 return m_pArray[index];
3560 VMA_HEAVY_ASSERT(m_Count > 0);
3563 const T& front()
const 3565 VMA_HEAVY_ASSERT(m_Count > 0);
3570 VMA_HEAVY_ASSERT(m_Count > 0);
3571 return m_pArray[m_Count - 1];
3573 const T& back()
const 3575 VMA_HEAVY_ASSERT(m_Count > 0);
3576 return m_pArray[m_Count - 1];
3579 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3581 newCapacity = VMA_MAX(newCapacity, m_Count);
3583 if((newCapacity < m_Capacity) && !freeMemory)
3585 newCapacity = m_Capacity;
3588 if(newCapacity != m_Capacity)
3590 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3593 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3595 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3596 m_Capacity = newCapacity;
3597 m_pArray = newArray;
3601 void resize(
size_t newCount,
bool freeMemory =
false)
3603 size_t newCapacity = m_Capacity;
3604 if(newCount > m_Capacity)
3606 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3610 newCapacity = newCount;
3613 if(newCapacity != m_Capacity)
3615 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3616 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3617 if(elementsToCopy != 0)
3619 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3621 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3622 m_Capacity = newCapacity;
3623 m_pArray = newArray;
3629 void clear(
bool freeMemory =
false)
3631 resize(0, freeMemory);
3634 void insert(
size_t index,
const T& src)
3636 VMA_HEAVY_ASSERT(index <= m_Count);
3637 const size_t oldCount = size();
3638 resize(oldCount + 1);
3639 if(index < oldCount)
3641 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3643 m_pArray[index] = src;
3646 void remove(
size_t index)
3648 VMA_HEAVY_ASSERT(index < m_Count);
3649 const size_t oldCount = size();
3650 if(index < oldCount - 1)
3652 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3654 resize(oldCount - 1);
3657 void push_back(
const T& src)
3659 const size_t newIndex = size();
3660 resize(newIndex + 1);
3661 m_pArray[newIndex] = src;
3666 VMA_HEAVY_ASSERT(m_Count > 0);
3670 void push_front(
const T& src)
3677 VMA_HEAVY_ASSERT(m_Count > 0);
3681 typedef T* iterator;
3683 iterator begin() {
return m_pArray; }
3684 iterator end() {
return m_pArray + m_Count; }
3687 AllocatorT m_Allocator;
3693 template<
typename T,
typename allocatorT>
3694 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3696 vec.insert(index, item);
3699 template<
typename T,
typename allocatorT>
3700 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3705 #endif // #if VMA_USE_STL_VECTOR 3707 template<
typename CmpLess,
typename VectorT>
3708 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3710 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3712 vector.data() + vector.size(),
3714 CmpLess()) - vector.data();
3715 VmaVectorInsert(vector, indexToInsert, value);
3716 return indexToInsert;
3719 template<
typename CmpLess,
typename VectorT>
3720 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3723 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3728 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3730 size_t indexToRemove = it - vector.begin();
3731 VmaVectorRemove(vector, indexToRemove);
3737 template<
typename CmpLess,
typename IterT,
typename KeyT>
3738 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3741 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3742 beg, end, value, comparator);
3744 (!comparator(*it, value) && !comparator(value, *it)))
3759 template<
typename T>
3760 class VmaPoolAllocator
3762 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3764 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3765 ~VmaPoolAllocator();
3773 uint32_t NextFreeIndex;
3780 uint32_t FirstFreeIndex;
3783 const VkAllocationCallbacks* m_pAllocationCallbacks;
3784 size_t m_ItemsPerBlock;
3785 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3787 ItemBlock& CreateNewBlock();
3790 template<
typename T>
3791 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3792 m_pAllocationCallbacks(pAllocationCallbacks),
3793 m_ItemsPerBlock(itemsPerBlock),
3794 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3796 VMA_ASSERT(itemsPerBlock > 0);
3799 template<
typename T>
3800 VmaPoolAllocator<T>::~VmaPoolAllocator()
3805 template<
typename T>
3806 void VmaPoolAllocator<T>::Clear()
3808 for(
size_t i = m_ItemBlocks.size(); i--; )
3809 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3810 m_ItemBlocks.clear();
3813 template<
typename T>
3814 T* VmaPoolAllocator<T>::Alloc()
3816 for(
size_t i = m_ItemBlocks.size(); i--; )
3818 ItemBlock& block = m_ItemBlocks[i];
3820 if(block.FirstFreeIndex != UINT32_MAX)
3822 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3823 block.FirstFreeIndex = pItem->NextFreeIndex;
3824 return &pItem->Value;
3829 ItemBlock& newBlock = CreateNewBlock();
3830 Item*
const pItem = &newBlock.pItems[0];
3831 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3832 return &pItem->Value;
3835 template<
typename T>
3836 void VmaPoolAllocator<T>::Free(T* ptr)
3839 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3841 ItemBlock& block = m_ItemBlocks[i];
3845 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3848 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3850 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3851 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3852 block.FirstFreeIndex = index;
3856 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3859 template<
typename T>
3860 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3862 ItemBlock newBlock = {
3863 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3865 m_ItemBlocks.push_back(newBlock);
3868 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3869 newBlock.pItems[i].NextFreeIndex = i + 1;
3870 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3871 return m_ItemBlocks.back();
3877 #if VMA_USE_STL_LIST 3879 #define VmaList std::list 3881 #else // #if VMA_USE_STL_LIST 3883 template<
typename T>
3892 template<
typename T>
3895 VMA_CLASS_NO_COPY(VmaRawList)
3897 typedef VmaListItem<T> ItemType;
3899 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3903 size_t GetCount()
const {
return m_Count; }
3904 bool IsEmpty()
const {
return m_Count == 0; }
3906 ItemType* Front() {
return m_pFront; }
3907 const ItemType* Front()
const {
return m_pFront; }
3908 ItemType* Back() {
return m_pBack; }
3909 const ItemType* Back()
const {
return m_pBack; }
3911 ItemType* PushBack();
3912 ItemType* PushFront();
3913 ItemType* PushBack(
const T& value);
3914 ItemType* PushFront(
const T& value);
3919 ItemType* InsertBefore(ItemType* pItem);
3921 ItemType* InsertAfter(ItemType* pItem);
3923 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3924 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3926 void Remove(ItemType* pItem);
3929 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3930 VmaPoolAllocator<ItemType> m_ItemAllocator;
3936 template<
typename T>
3937 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3938 m_pAllocationCallbacks(pAllocationCallbacks),
3939 m_ItemAllocator(pAllocationCallbacks, 128),
3946 template<
typename T>
3947 VmaRawList<T>::~VmaRawList()
3953 template<
typename T>
3954 void VmaRawList<T>::Clear()
3956 if(IsEmpty() ==
false)
3958 ItemType* pItem = m_pBack;
3959 while(pItem != VMA_NULL)
3961 ItemType*
const pPrevItem = pItem->pPrev;
3962 m_ItemAllocator.Free(pItem);
3965 m_pFront = VMA_NULL;
3971 template<
typename T>
3972 VmaListItem<T>* VmaRawList<T>::PushBack()
3974 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3975 pNewItem->pNext = VMA_NULL;
3978 pNewItem->pPrev = VMA_NULL;
3979 m_pFront = pNewItem;
3985 pNewItem->pPrev = m_pBack;
3986 m_pBack->pNext = pNewItem;
3993 template<
typename T>
3994 VmaListItem<T>* VmaRawList<T>::PushFront()
3996 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3997 pNewItem->pPrev = VMA_NULL;
4000 pNewItem->pNext = VMA_NULL;
4001 m_pFront = pNewItem;
4007 pNewItem->pNext = m_pFront;
4008 m_pFront->pPrev = pNewItem;
4009 m_pFront = pNewItem;
4015 template<
typename T>
4016 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4018 ItemType*
const pNewItem = PushBack();
4019 pNewItem->Value = value;
4023 template<
typename T>
4024 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4026 ItemType*
const pNewItem = PushFront();
4027 pNewItem->Value = value;
4031 template<
typename T>
4032 void VmaRawList<T>::PopBack()
4034 VMA_HEAVY_ASSERT(m_Count > 0);
4035 ItemType*
const pBackItem = m_pBack;
4036 ItemType*
const pPrevItem = pBackItem->pPrev;
4037 if(pPrevItem != VMA_NULL)
4039 pPrevItem->pNext = VMA_NULL;
4041 m_pBack = pPrevItem;
4042 m_ItemAllocator.Free(pBackItem);
4046 template<
typename T>
4047 void VmaRawList<T>::PopFront()
4049 VMA_HEAVY_ASSERT(m_Count > 0);
4050 ItemType*
const pFrontItem = m_pFront;
4051 ItemType*
const pNextItem = pFrontItem->pNext;
4052 if(pNextItem != VMA_NULL)
4054 pNextItem->pPrev = VMA_NULL;
4056 m_pFront = pNextItem;
4057 m_ItemAllocator.Free(pFrontItem);
4061 template<
typename T>
4062 void VmaRawList<T>::Remove(ItemType* pItem)
4064 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4065 VMA_HEAVY_ASSERT(m_Count > 0);
4067 if(pItem->pPrev != VMA_NULL)
4069 pItem->pPrev->pNext = pItem->pNext;
4073 VMA_HEAVY_ASSERT(m_pFront == pItem);
4074 m_pFront = pItem->pNext;
4077 if(pItem->pNext != VMA_NULL)
4079 pItem->pNext->pPrev = pItem->pPrev;
4083 VMA_HEAVY_ASSERT(m_pBack == pItem);
4084 m_pBack = pItem->pPrev;
4087 m_ItemAllocator.Free(pItem);
4091 template<
typename T>
4092 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4094 if(pItem != VMA_NULL)
4096 ItemType*
const prevItem = pItem->pPrev;
4097 ItemType*
const newItem = m_ItemAllocator.Alloc();
4098 newItem->pPrev = prevItem;
4099 newItem->pNext = pItem;
4100 pItem->pPrev = newItem;
4101 if(prevItem != VMA_NULL)
4103 prevItem->pNext = newItem;
4107 VMA_HEAVY_ASSERT(m_pFront == pItem);
4117 template<
typename T>
4118 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4120 if(pItem != VMA_NULL)
4122 ItemType*
const nextItem = pItem->pNext;
4123 ItemType*
const newItem = m_ItemAllocator.Alloc();
4124 newItem->pNext = nextItem;
4125 newItem->pPrev = pItem;
4126 pItem->pNext = newItem;
4127 if(nextItem != VMA_NULL)
4129 nextItem->pPrev = newItem;
4133 VMA_HEAVY_ASSERT(m_pBack == pItem);
4143 template<
typename T>
4144 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4146 ItemType*
const newItem = InsertBefore(pItem);
4147 newItem->Value = value;
4151 template<
typename T>
4152 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4154 ItemType*
const newItem = InsertAfter(pItem);
4155 newItem->Value = value;
4159 template<
typename T,
typename AllocatorT>
4162 VMA_CLASS_NO_COPY(VmaList)
4173 T& operator*()
const 4175 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4176 return m_pItem->Value;
4178 T* operator->()
const 4180 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4181 return &m_pItem->Value;
4184 iterator& operator++()
4186 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4187 m_pItem = m_pItem->pNext;
4190 iterator& operator--()
4192 if(m_pItem != VMA_NULL)
4194 m_pItem = m_pItem->pPrev;
4198 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4199 m_pItem = m_pList->Back();
4204 iterator operator++(
int)
4206 iterator result = *
this;
4210 iterator operator--(
int)
4212 iterator result = *
this;
4217 bool operator==(
const iterator& rhs)
const 4219 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4220 return m_pItem == rhs.m_pItem;
4222 bool operator!=(
const iterator& rhs)
const 4224 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4225 return m_pItem != rhs.m_pItem;
4229 VmaRawList<T>* m_pList;
4230 VmaListItem<T>* m_pItem;
4232 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4238 friend class VmaList<T, AllocatorT>;
4241 class const_iterator
4250 const_iterator(
const iterator& src) :
4251 m_pList(src.m_pList),
4252 m_pItem(src.m_pItem)
4256 const T& operator*()
const 4258 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4259 return m_pItem->Value;
4261 const T* operator->()
const 4263 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4264 return &m_pItem->Value;
4267 const_iterator& operator++()
4269 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4270 m_pItem = m_pItem->pNext;
4273 const_iterator& operator--()
4275 if(m_pItem != VMA_NULL)
4277 m_pItem = m_pItem->pPrev;
4281 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4282 m_pItem = m_pList->Back();
4287 const_iterator operator++(
int)
4289 const_iterator result = *
this;
4293 const_iterator operator--(
int)
4295 const_iterator result = *
this;
4300 bool operator==(
const const_iterator& rhs)
const 4302 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4303 return m_pItem == rhs.m_pItem;
4305 bool operator!=(
const const_iterator& rhs)
const 4307 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4308 return m_pItem != rhs.m_pItem;
4312 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4318 const VmaRawList<T>* m_pList;
4319 const VmaListItem<T>* m_pItem;
4321 friend class VmaList<T, AllocatorT>;
4324 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4326 bool empty()
const {
return m_RawList.IsEmpty(); }
4327 size_t size()
const {
return m_RawList.GetCount(); }
4329 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4330 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4332 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4333 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4335 void clear() { m_RawList.Clear(); }
4336 void push_back(
const T& value) { m_RawList.PushBack(value); }
4337 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4338 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4341 VmaRawList<T> m_RawList;
4344 #endif // #if VMA_USE_STL_LIST 4352 #if VMA_USE_STL_UNORDERED_MAP 4354 #define VmaPair std::pair 4356 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4357 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4359 #else // #if VMA_USE_STL_UNORDERED_MAP 4361 template<
typename T1,
typename T2>
4367 VmaPair() : first(), second() { }
4368 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4374 template<
typename KeyT,
typename ValueT>
4378 typedef VmaPair<KeyT, ValueT> PairType;
4379 typedef PairType* iterator;
4381 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4383 iterator begin() {
return m_Vector.begin(); }
4384 iterator end() {
return m_Vector.end(); }
4386 void insert(
const PairType& pair);
4387 iterator find(
const KeyT& key);
4388 void erase(iterator it);
4391 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4394 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4396 template<
typename FirstT,
typename SecondT>
4397 struct VmaPairFirstLess
4399 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4401 return lhs.first < rhs.first;
4403 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4405 return lhs.first < rhsFirst;
4409 template<
typename KeyT,
typename ValueT>
4410 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4412 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4414 m_Vector.data() + m_Vector.size(),
4416 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4417 VmaVectorInsert(m_Vector, indexToInsert, pair);
4420 template<
typename KeyT,
typename ValueT>
4421 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4423 PairType* it = VmaBinaryFindFirstNotLess(
4425 m_Vector.data() + m_Vector.size(),
4427 VmaPairFirstLess<KeyT, ValueT>());
4428 if((it != m_Vector.end()) && (it->first == key))
4434 return m_Vector.end();
4438 template<
typename KeyT,
typename ValueT>
4439 void VmaMap<KeyT, ValueT>::erase(iterator it)
4441 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4444 #endif // #if VMA_USE_STL_UNORDERED_MAP 4450 class VmaDeviceMemoryBlock;
4452 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4454 struct VmaAllocation_T
4456 VMA_CLASS_NO_COPY(VmaAllocation_T)
4458 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4462 FLAG_USER_DATA_STRING = 0x01,
4466 enum ALLOCATION_TYPE
4468 ALLOCATION_TYPE_NONE,
4469 ALLOCATION_TYPE_BLOCK,
4470 ALLOCATION_TYPE_DEDICATED,
4473 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4476 m_pUserData(VMA_NULL),
4477 m_LastUseFrameIndex(currentFrameIndex),
4478 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4479 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4481 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4483 #if VMA_STATS_STRING_ENABLED 4484 m_CreationFrameIndex = currentFrameIndex;
4485 m_BufferImageUsage = 0;
4491 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4494 VMA_ASSERT(m_pUserData == VMA_NULL);
4497 void InitBlockAllocation(
4499 VmaDeviceMemoryBlock* block,
4500 VkDeviceSize offset,
4501 VkDeviceSize alignment,
4503 VmaSuballocationType suballocationType,
4507 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4508 VMA_ASSERT(block != VMA_NULL);
4509 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4510 m_Alignment = alignment;
4512 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4513 m_SuballocationType = (uint8_t)suballocationType;
4514 m_BlockAllocation.m_hPool = hPool;
4515 m_BlockAllocation.m_Block = block;
4516 m_BlockAllocation.m_Offset = offset;
4517 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4522 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4523 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4524 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4525 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4526 m_BlockAllocation.m_Block = VMA_NULL;
4527 m_BlockAllocation.m_Offset = 0;
4528 m_BlockAllocation.m_CanBecomeLost =
true;
4531 void ChangeBlockAllocation(
4533 VmaDeviceMemoryBlock* block,
4534 VkDeviceSize offset);
4536 void ChangeSize(VkDeviceSize newSize);
4539 void InitDedicatedAllocation(
4540 uint32_t memoryTypeIndex,
4541 VkDeviceMemory hMemory,
4542 VmaSuballocationType suballocationType,
4546 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4547 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4548 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4551 m_SuballocationType = (uint8_t)suballocationType;
4552 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4553 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4554 m_DedicatedAllocation.m_hMemory = hMemory;
4555 m_DedicatedAllocation.m_pMappedData = pMappedData;
4558 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4559 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4560 VkDeviceSize GetSize()
const {
return m_Size; }
4561 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4562 void* GetUserData()
const {
return m_pUserData; }
4563 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4564 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4566 VmaDeviceMemoryBlock* GetBlock()
const 4568 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4569 return m_BlockAllocation.m_Block;
4571 VkDeviceSize GetOffset()
const;
4572 VkDeviceMemory GetMemory()
const;
4573 uint32_t GetMemoryTypeIndex()
const;
4574 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4575 void* GetMappedData()
const;
4576 bool CanBecomeLost()
const;
4579 uint32_t GetLastUseFrameIndex()
const 4581 return m_LastUseFrameIndex.load();
4583 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4585 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4595 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4597 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4599 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4610 void BlockAllocMap();
4611 void BlockAllocUnmap();
4612 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4615 #if VMA_STATS_STRING_ENABLED 4616 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4617 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4619 void InitBufferImageUsage(uint32_t bufferImageUsage)
4621 VMA_ASSERT(m_BufferImageUsage == 0);
4622 m_BufferImageUsage = bufferImageUsage;
4625 void PrintParameters(
class VmaJsonWriter& json)
const;
4629 VkDeviceSize m_Alignment;
4630 VkDeviceSize m_Size;
4632 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4634 uint8_t m_SuballocationType;
4641 struct BlockAllocation
4644 VmaDeviceMemoryBlock* m_Block;
4645 VkDeviceSize m_Offset;
4646 bool m_CanBecomeLost;
4650 struct DedicatedAllocation
4652 uint32_t m_MemoryTypeIndex;
4653 VkDeviceMemory m_hMemory;
4654 void* m_pMappedData;
4660 BlockAllocation m_BlockAllocation;
4662 DedicatedAllocation m_DedicatedAllocation;
4665 #if VMA_STATS_STRING_ENABLED 4666 uint32_t m_CreationFrameIndex;
4667 uint32_t m_BufferImageUsage;
4677 struct VmaSuballocation
4679 VkDeviceSize offset;
4682 VmaSuballocationType type;
4686 struct VmaSuballocationOffsetLess
4688 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4690 return lhs.offset < rhs.offset;
4693 struct VmaSuballocationOffsetGreater
4695 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4697 return lhs.offset > rhs.offset;
4701 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4704 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4719 struct VmaAllocationRequest
4721 VkDeviceSize offset;
4722 VkDeviceSize sumFreeSize;
4723 VkDeviceSize sumItemSize;
4724 VmaSuballocationList::iterator item;
4725 size_t itemsToMakeLostCount;
4728 VkDeviceSize CalcCost()
const 4730 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4738 class VmaBlockMetadata
4742 virtual ~VmaBlockMetadata() { }
4743 virtual void Init(VkDeviceSize size) { m_Size = size; }
4746 virtual bool Validate()
const = 0;
4747 VkDeviceSize GetSize()
const {
return m_Size; }
4748 virtual size_t GetAllocationCount()
const = 0;
4749 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4750 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4752 virtual bool IsEmpty()
const = 0;
4754 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4756 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4758 #if VMA_STATS_STRING_ENABLED 4759 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4765 virtual bool CreateAllocationRequest(
4766 uint32_t currentFrameIndex,
4767 uint32_t frameInUseCount,
4768 VkDeviceSize bufferImageGranularity,
4769 VkDeviceSize allocSize,
4770 VkDeviceSize allocAlignment,
4772 VmaSuballocationType allocType,
4773 bool canMakeOtherLost,
4775 VmaAllocationRequest* pAllocationRequest) = 0;
4777 virtual bool MakeRequestedAllocationsLost(
4778 uint32_t currentFrameIndex,
4779 uint32_t frameInUseCount,
4780 VmaAllocationRequest* pAllocationRequest) = 0;
4782 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4784 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4788 const VmaAllocationRequest& request,
4789 VmaSuballocationType type,
4790 VkDeviceSize allocSize,
4796 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4799 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
4802 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4804 #if VMA_STATS_STRING_ENABLED 4805 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4806 VkDeviceSize unusedBytes,
4807 size_t allocationCount,
4808 size_t unusedRangeCount)
const;
4809 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4810 VkDeviceSize offset,
4812 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4813 VkDeviceSize offset,
4814 VkDeviceSize size)
const;
4815 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4819 VkDeviceSize m_Size;
4820 const VkAllocationCallbacks* m_pAllocationCallbacks;
4823 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4824 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4828 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4830 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4833 virtual ~VmaBlockMetadata_Generic();
4834 virtual void Init(VkDeviceSize size);
4836 virtual bool Validate()
const;
4837 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4838 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4839 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4840 virtual bool IsEmpty()
const;
4842 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4843 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4845 #if VMA_STATS_STRING_ENABLED 4846 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4849 virtual bool CreateAllocationRequest(
4850 uint32_t currentFrameIndex,
4851 uint32_t frameInUseCount,
4852 VkDeviceSize bufferImageGranularity,
4853 VkDeviceSize allocSize,
4854 VkDeviceSize allocAlignment,
4856 VmaSuballocationType allocType,
4857 bool canMakeOtherLost,
4859 VmaAllocationRequest* pAllocationRequest);
4861 virtual bool MakeRequestedAllocationsLost(
4862 uint32_t currentFrameIndex,
4863 uint32_t frameInUseCount,
4864 VmaAllocationRequest* pAllocationRequest);
4866 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4868 virtual VkResult CheckCorruption(
const void* pBlockData);
4871 const VmaAllocationRequest& request,
4872 VmaSuballocationType type,
4873 VkDeviceSize allocSize,
4878 virtual void FreeAtOffset(VkDeviceSize offset);
4880 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
4883 uint32_t m_FreeCount;
4884 VkDeviceSize m_SumFreeSize;
4885 VmaSuballocationList m_Suballocations;
4888 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4890 bool ValidateFreeSuballocationList()
const;
4894 bool CheckAllocation(
4895 uint32_t currentFrameIndex,
4896 uint32_t frameInUseCount,
4897 VkDeviceSize bufferImageGranularity,
4898 VkDeviceSize allocSize,
4899 VkDeviceSize allocAlignment,
4900 VmaSuballocationType allocType,
4901 VmaSuballocationList::const_iterator suballocItem,
4902 bool canMakeOtherLost,
4903 VkDeviceSize* pOffset,
4904 size_t* itemsToMakeLostCount,
4905 VkDeviceSize* pSumFreeSize,
4906 VkDeviceSize* pSumItemSize)
const;
4908 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4912 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4915 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4918 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4999 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5001 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5004 virtual ~VmaBlockMetadata_Linear();
5005 virtual void Init(VkDeviceSize size);
5007 virtual bool Validate()
const;
5008 virtual size_t GetAllocationCount()
const;
5009 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5010 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5011 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5013 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5014 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5016 #if VMA_STATS_STRING_ENABLED 5017 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5020 virtual bool CreateAllocationRequest(
5021 uint32_t currentFrameIndex,
5022 uint32_t frameInUseCount,
5023 VkDeviceSize bufferImageGranularity,
5024 VkDeviceSize allocSize,
5025 VkDeviceSize allocAlignment,
5027 VmaSuballocationType allocType,
5028 bool canMakeOtherLost,
5030 VmaAllocationRequest* pAllocationRequest);
5032 virtual bool MakeRequestedAllocationsLost(
5033 uint32_t currentFrameIndex,
5034 uint32_t frameInUseCount,
5035 VmaAllocationRequest* pAllocationRequest);
5037 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5039 virtual VkResult CheckCorruption(
const void* pBlockData);
5042 const VmaAllocationRequest& request,
5043 VmaSuballocationType type,
5044 VkDeviceSize allocSize,
5049 virtual void FreeAtOffset(VkDeviceSize offset);
5059 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5061 enum SECOND_VECTOR_MODE
5063 SECOND_VECTOR_EMPTY,
5068 SECOND_VECTOR_RING_BUFFER,
5074 SECOND_VECTOR_DOUBLE_STACK,
5077 VkDeviceSize m_SumFreeSize;
5078 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5079 uint32_t m_1stVectorIndex;
5080 SECOND_VECTOR_MODE m_2ndVectorMode;
5082 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5083 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5084 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5085 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5088 size_t m_1stNullItemsBeginCount;
5090 size_t m_1stNullItemsMiddleCount;
5092 size_t m_2ndNullItemsCount;
5094 bool ShouldCompact1st()
const;
5095 void CleanupAfterFree();
5109 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5111 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5114 virtual ~VmaBlockMetadata_Buddy();
5115 virtual void Init(VkDeviceSize size);
5117 virtual bool Validate()
const;
5118 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5119 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5120 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5121 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5123 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5124 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5126 #if VMA_STATS_STRING_ENABLED 5127 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5130 virtual bool CreateAllocationRequest(
5131 uint32_t currentFrameIndex,
5132 uint32_t frameInUseCount,
5133 VkDeviceSize bufferImageGranularity,
5134 VkDeviceSize allocSize,
5135 VkDeviceSize allocAlignment,
5137 VmaSuballocationType allocType,
5138 bool canMakeOtherLost,
5140 VmaAllocationRequest* pAllocationRequest);
5142 virtual bool MakeRequestedAllocationsLost(
5143 uint32_t currentFrameIndex,
5144 uint32_t frameInUseCount,
5145 VmaAllocationRequest* pAllocationRequest);
5147 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5149 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5152 const VmaAllocationRequest& request,
5153 VmaSuballocationType type,
5154 VkDeviceSize allocSize,
5158 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5159 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5162 static const VkDeviceSize MIN_NODE_SIZE = 32;
5163 static const size_t MAX_LEVELS = 30;
5165 struct ValidationContext
5167 size_t calculatedAllocationCount;
5168 size_t calculatedFreeCount;
5169 VkDeviceSize calculatedSumFreeSize;
5171 ValidationContext() :
5172 calculatedAllocationCount(0),
5173 calculatedFreeCount(0),
5174 calculatedSumFreeSize(0) { }
5179 VkDeviceSize offset;
5209 VkDeviceSize m_UsableSize;
5210 uint32_t m_LevelCount;
5216 } m_FreeList[MAX_LEVELS];
5218 size_t m_AllocationCount;
5222 VkDeviceSize m_SumFreeSize;
5224 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5225 void DeleteNode(Node* node);
5226 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5227 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5228 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5230 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5231 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5235 void AddToFreeListFront(uint32_t level, Node* node);
5239 void RemoveFromFreeList(uint32_t level, Node* node);
5241 #if VMA_STATS_STRING_ENABLED 5242 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5252 class VmaDeviceMemoryBlock
5254 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5256 VmaBlockMetadata* m_pMetadata;
5260 ~VmaDeviceMemoryBlock()
5262 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5263 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5269 uint32_t newMemoryTypeIndex,
5270 VkDeviceMemory newMemory,
5271 VkDeviceSize newSize,
5273 uint32_t algorithm);
5277 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5278 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5279 uint32_t GetId()
const {
return m_Id; }
5280 void* GetMappedData()
const {
return m_pMappedData; }
5283 bool Validate()
const;
5288 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5291 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5292 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5294 VkResult BindBufferMemory(
5298 VkResult BindImageMemory(
5304 uint32_t m_MemoryTypeIndex;
5306 VkDeviceMemory m_hMemory;
5311 uint32_t m_MapCount;
5312 void* m_pMappedData;
// Orders raw pointers by address; used for sorted pointer containers.
// NOTE(review): the comparator body was lost in extraction; restored from
// the upstream header.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
5331 struct VmaBlockVector
5333 VMA_CLASS_NO_COPY(VmaBlockVector)
5337 uint32_t memoryTypeIndex,
5338 VkDeviceSize preferredBlockSize,
5339 size_t minBlockCount,
5340 size_t maxBlockCount,
5341 VkDeviceSize bufferImageGranularity,
5342 uint32_t frameInUseCount,
5344 bool explicitBlockSize,
5345 uint32_t algorithm);
5348 VkResult CreateMinBlocks();
5350 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5351 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5352 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5353 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5354 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5358 bool IsEmpty()
const {
return m_Blocks.empty(); }
5359 bool IsCorruptionDetectionEnabled()
const;
5363 uint32_t currentFrameIndex,
5365 VkDeviceSize alignment,
5367 VmaSuballocationType suballocType,
5376 #if VMA_STATS_STRING_ENABLED 5377 void PrintDetailedMap(
class VmaJsonWriter& json);
5380 void MakePoolAllocationsLost(
5381 uint32_t currentFrameIndex,
5382 size_t* pLostAllocationCount);
5383 VkResult CheckCorruption();
5385 VmaDefragmentator* EnsureDefragmentator(
5387 uint32_t currentFrameIndex);
5389 VkResult Defragment(
5391 VkDeviceSize& maxBytesToMove,
5392 uint32_t& maxAllocationsToMove);
5394 void DestroyDefragmentator();
5397 friend class VmaDefragmentator;
5400 const uint32_t m_MemoryTypeIndex;
5401 const VkDeviceSize m_PreferredBlockSize;
5402 const size_t m_MinBlockCount;
5403 const size_t m_MaxBlockCount;
5404 const VkDeviceSize m_BufferImageGranularity;
5405 const uint32_t m_FrameInUseCount;
5406 const bool m_IsCustomPool;
5407 const bool m_ExplicitBlockSize;
5408 const uint32_t m_Algorithm;
5409 bool m_HasEmptyBlock;
5412 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5416 VmaDefragmentator* m_pDefragmentator;
5417 uint32_t m_NextBlockId;
5419 VkDeviceSize CalcMaxBlockSize()
const;
5422 void Remove(VmaDeviceMemoryBlock* pBlock);
5426 void IncrementallySortBlocks();
5429 VkResult AllocateFromBlock(
5430 VmaDeviceMemoryBlock* pBlock,
5432 uint32_t currentFrameIndex,
5434 VkDeviceSize alignment,
5437 VmaSuballocationType suballocType,
5441 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5446 VMA_CLASS_NO_COPY(VmaPool_T)
5448 VmaBlockVector m_BlockVector;
5453 VkDeviceSize preferredBlockSize);
5456 uint32_t GetId()
const {
return m_Id; }
5457 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5459 #if VMA_STATS_STRING_ENABLED 5467 class VmaDefragmentator
5469 VMA_CLASS_NO_COPY(VmaDefragmentator)
5472 VmaBlockVector*
const m_pBlockVector;
5473 uint32_t m_CurrentFrameIndex;
5474 VkDeviceSize m_BytesMoved;
5475 uint32_t m_AllocationsMoved;
5477 struct AllocationInfo
5480 VkBool32* m_pChanged;
5483 m_hAllocation(VK_NULL_HANDLE),
5484 m_pChanged(VMA_NULL)
5489 struct AllocationInfoSizeGreater
5491 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5493 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5498 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5502 VmaDeviceMemoryBlock* m_pBlock;
5503 bool m_HasNonMovableAllocations;
5504 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5506 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5508 m_HasNonMovableAllocations(true),
5509 m_Allocations(pAllocationCallbacks),
5510 m_pMappedDataForDefragmentation(VMA_NULL)
5514 void CalcHasNonMovableAllocations()
5516 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5517 const size_t defragmentAllocCount = m_Allocations.size();
5518 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5521 void SortAllocationsBySizeDescecnding()
5523 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5526 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5531 void* m_pMappedDataForDefragmentation;
5534 struct BlockPointerLess
5536 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5538 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5540 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5542 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5548 struct BlockInfoCompareMoveDestination
5550 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5552 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5556 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5560 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5568 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5569 BlockInfoVector m_Blocks;
5571 VkResult DefragmentRound(
5572 VkDeviceSize maxBytesToMove,
5573 uint32_t maxAllocationsToMove);
5575 static bool MoveMakesSense(
5576 size_t dstBlockIndex, VkDeviceSize dstOffset,
5577 size_t srcBlockIndex, VkDeviceSize srcOffset);
5582 VmaBlockVector* pBlockVector,
5583 uint32_t currentFrameIndex);
5585 ~VmaDefragmentator();
5587 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5588 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5590 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5592 VkResult Defragment(
5593 VkDeviceSize maxBytesToMove,
5594 uint32_t maxAllocationsToMove);
5597 #if VMA_RECORDING_ENABLED 5604 void WriteConfiguration(
5605 const VkPhysicalDeviceProperties& devProps,
5606 const VkPhysicalDeviceMemoryProperties& memProps,
5607 bool dedicatedAllocationExtensionEnabled);
5610 void RecordCreateAllocator(uint32_t frameIndex);
5611 void RecordDestroyAllocator(uint32_t frameIndex);
5612 void RecordCreatePool(uint32_t frameIndex,
5615 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5616 void RecordAllocateMemory(uint32_t frameIndex,
5617 const VkMemoryRequirements& vkMemReq,
5620 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5621 const VkMemoryRequirements& vkMemReq,
5622 bool requiresDedicatedAllocation,
5623 bool prefersDedicatedAllocation,
5626 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5627 const VkMemoryRequirements& vkMemReq,
5628 bool requiresDedicatedAllocation,
5629 bool prefersDedicatedAllocation,
5632 void RecordFreeMemory(uint32_t frameIndex,
5634 void RecordResizeAllocation(
5635 uint32_t frameIndex,
5637 VkDeviceSize newSize);
5638 void RecordSetAllocationUserData(uint32_t frameIndex,
5640 const void* pUserData);
5641 void RecordCreateLostAllocation(uint32_t frameIndex,
5643 void RecordMapMemory(uint32_t frameIndex,
5645 void RecordUnmapMemory(uint32_t frameIndex,
5647 void RecordFlushAllocation(uint32_t frameIndex,
5648 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5649 void RecordInvalidateAllocation(uint32_t frameIndex,
5650 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5651 void RecordCreateBuffer(uint32_t frameIndex,
5652 const VkBufferCreateInfo& bufCreateInfo,
5655 void RecordCreateImage(uint32_t frameIndex,
5656 const VkImageCreateInfo& imageCreateInfo,
5659 void RecordDestroyBuffer(uint32_t frameIndex,
5661 void RecordDestroyImage(uint32_t frameIndex,
5663 void RecordTouchAllocation(uint32_t frameIndex,
5665 void RecordGetAllocationInfo(uint32_t frameIndex,
5667 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5677 class UserDataString
5681 const char* GetString()
const {
return m_Str; }
5691 VMA_MUTEX m_FileMutex;
5693 int64_t m_StartCounter;
5695 void GetBasicParams(CallParams& outParams);
5699 #endif // #if VMA_RECORDING_ENABLED 5702 struct VmaAllocator_T
5704 VMA_CLASS_NO_COPY(VmaAllocator_T)
5707 bool m_UseKhrDedicatedAllocation;
5709 bool m_AllocationCallbacksSpecified;
5710 VkAllocationCallbacks m_AllocationCallbacks;
5714 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5715 VMA_MUTEX m_HeapSizeLimitMutex;
5717 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5718 VkPhysicalDeviceMemoryProperties m_MemProps;
5721 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5724 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5725 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5726 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5732 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5734 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5738 return m_VulkanFunctions;
5741 VkDeviceSize GetBufferImageGranularity()
const 5744 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5745 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5748 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5749 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5751 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5753 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5754 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5757 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5759 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5760 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5763 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5765 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5766 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5767 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5770 bool IsIntegratedGpu()
const 5772 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5775 #if VMA_RECORDING_ENABLED 5776 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5779 void GetBufferMemoryRequirements(
5781 VkMemoryRequirements& memReq,
5782 bool& requiresDedicatedAllocation,
5783 bool& prefersDedicatedAllocation)
const;
5784 void GetImageMemoryRequirements(
5786 VkMemoryRequirements& memReq,
5787 bool& requiresDedicatedAllocation,
5788 bool& prefersDedicatedAllocation)
const;
5791 VkResult AllocateMemory(
5792 const VkMemoryRequirements& vkMemReq,
5793 bool requiresDedicatedAllocation,
5794 bool prefersDedicatedAllocation,
5795 VkBuffer dedicatedBuffer,
5796 VkImage dedicatedImage,
5798 VmaSuballocationType suballocType,
5804 VkResult ResizeAllocation(
5806 VkDeviceSize newSize);
5808 void CalculateStats(
VmaStats* pStats);
5810 #if VMA_STATS_STRING_ENABLED 5811 void PrintDetailedMap(
class VmaJsonWriter& json);
5814 VkResult Defragment(
5816 size_t allocationCount,
5817 VkBool32* pAllocationsChanged,
5825 void DestroyPool(
VmaPool pool);
5828 void SetCurrentFrameIndex(uint32_t frameIndex);
5829 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5831 void MakePoolAllocationsLost(
5833 size_t* pLostAllocationCount);
5834 VkResult CheckPoolCorruption(
VmaPool hPool);
5835 VkResult CheckCorruption(uint32_t memoryTypeBits);
5839 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5840 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5845 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5846 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5848 void FlushOrInvalidateAllocation(
5850 VkDeviceSize offset, VkDeviceSize size,
5851 VMA_CACHE_OPERATION op);
5853 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5856 VkDeviceSize m_PreferredLargeHeapBlockSize;
5858 VkPhysicalDevice m_PhysicalDevice;
5859 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5861 VMA_MUTEX m_PoolsMutex;
5863 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5864 uint32_t m_NextPoolId;
5868 #if VMA_RECORDING_ENABLED 5869 VmaRecorder* m_pRecorder;
5874 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5876 VkResult AllocateMemoryOfType(
5878 VkDeviceSize alignment,
5879 bool dedicatedAllocation,
5880 VkBuffer dedicatedBuffer,
5881 VkImage dedicatedImage,
5883 uint32_t memTypeIndex,
5884 VmaSuballocationType suballocType,
5888 VkResult AllocateDedicatedMemory(
5890 VmaSuballocationType suballocType,
5891 uint32_t memTypeIndex,
5893 bool isUserDataString,
5895 VkBuffer dedicatedBuffer,
5896 VkImage dedicatedImage,
5906 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5908 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5911 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5913 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5916 template<
typename T>
5919 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5922 template<
typename T>
5923 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5925 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5928 template<
typename T>
5929 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5934 VmaFree(hAllocator, ptr);
5938 template<
typename T>
5939 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5943 for(
size_t i = count; i--; )
5945 VmaFree(hAllocator, ptr);
// VmaStringBuilder: minimal append-only character buffer used to build the
// statistics JSON string. Backed by a VmaVector<char> that allocates through
// the allocator's VkAllocationCallbacks. The buffer is NOT null-terminated:
// callers must pair GetData() with GetLength().
5952 #if VMA_STATS_STRING_ENABLED 5954 class VmaStringBuilder
5957 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5958 size_t GetLength()
const {
return m_Data.size(); }
5959 const char* GetData()
const {
return m_Data.data(); }
5961 void Add(
char ch) { m_Data.push_back(ch); }
5962 void Add(
const char* pStr);
5963 void AddNewLine() { Add(
'\n'); }
5964 void AddNumber(uint32_t num);
5965 void AddNumber(uint64_t num);
5966 void AddPointer(
const void* ptr);
5969 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends the characters of pStr, excluding the terminating '\0'
// (resizes by strlen(pStr) and memcpy's exactly that many bytes).
5972 void VmaStringBuilder::Add(
const char* pStr)
5974 const size_t strLen = strlen(pStr);
5977 const size_t oldCount = m_Data.size();
5978 m_Data.resize(oldCount + strLen);
5979 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Appends the textual form of a 32-bit unsigned number via VmaUint32ToStr.
// NOTE(review): the local `buf` declaration is not visible in this extraction.
5983 void VmaStringBuilder::AddNumber(uint32_t num)
5986 VmaUint32ToStr(buf,
sizeof(buf), num);
// Appends the textual form of a 64-bit unsigned number via VmaUint64ToStr.
5990 void VmaStringBuilder::AddNumber(uint64_t num)
5993 VmaUint64ToStr(buf,
sizeof(buf), num);
// Appends a formatted pointer value via VmaPtrToStr.
5997 void VmaStringBuilder::AddPointer(
const void* ptr)
6000 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: emits structured JSON into a VmaStringBuilder. Object/array
// nesting is tracked on m_Stack (one StackItem per open collection);
// m_InsideString guards Begin/Continue/EndString sequencing. Non-copyable.
6004 #endif // #if VMA_STATS_STRING_ENABLED 6009 #if VMA_STATS_STRING_ENABLED 6013 VMA_CLASS_NO_COPY(VmaJsonWriter)
6015 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// Collection control: singleLine suppresses newlines/indentation inside
// the opened collection.
6018 void BeginObject(
bool singleLine =
false);
6021 void BeginArray(
bool singleLine =
false);
// Whole-string write, or an incremental Begin/Continue.../End sequence.
6024 void WriteString(
const char* pStr);
6025 void BeginString(
const char* pStr = VMA_NULL);
6026 void ContinueString(
const char* pStr);
6027 void ContinueString(uint32_t n);
6028 void ContinueString(uint64_t n);
6029 void ContinueString_Pointer(
const void* ptr);
6030 void EndString(
const char* pStr = VMA_NULL);
// Scalar value writers.
6032 void WriteNumber(uint32_t n);
6033 void WriteNumber(uint64_t n);
6034 void WriteBool(
bool b);
// Per-level indentation unit (defined below).
6038 static const char*
const INDENT;
6040 enum COLLECTION_TYPE
6042 COLLECTION_TYPE_OBJECT,
6043 COLLECTION_TYPE_ARRAY,
// StackItem fields: valueCount counts values written in the collection
// (for objects, keys and values both count - see BeginValue).
6047 COLLECTION_TYPE type;
6048 uint32_t valueCount;
6049 bool singleLineMode;
6052 VmaStringBuilder& m_SB;
6053 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6054 bool m_InsideString;
// Internal helpers: BeginValue emits separators/indent before a value;
// WriteIndent emits newline + INDENT per stack level (oneLess for closers).
6056 void BeginValue(
bool isString);
6057 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
6060 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor: binds the output string builder and allocates the nesting
// stack with the caller-provided callbacks. NOTE(review): the m_SB(sb)
// initializer (original line 6063) is missing from this extraction.
6062 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6064 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6065 m_InsideString(false)
// Destructor: asserts the JSON document was fully closed (no open string,
// no open object/array).
6069 VmaJsonWriter::~VmaJsonWriter()
6071 VMA_ASSERT(!m_InsideString);
6072 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object and pushes its tracking entry on the stack.
// NOTE(review): the '{' emission and StackItem declaration (6078-6082)
// are missing from this extraction.
6075 void VmaJsonWriter::BeginObject(
bool singleLine)
6077 VMA_ASSERT(!m_InsideString);
6083 item.type = COLLECTION_TYPE_OBJECT;
6084 item.valueCount = 0;
6085 item.singleLineMode = singleLine;
6086 m_Stack.push_back(item);
// Closes the innermost object; asserts it really is an object.
6089 void VmaJsonWriter::EndObject()
6091 VMA_ASSERT(!m_InsideString);
6096 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array, mirroring BeginObject.
6100 void VmaJsonWriter::BeginArray(
bool singleLine)
6102 VMA_ASSERT(!m_InsideString);
6108 item.type = COLLECTION_TYPE_ARRAY;
6109 item.valueCount = 0;
6110 item.singleLineMode = singleLine;
6111 m_Stack.push_back(item);
// Closes the innermost array; asserts it really is an array.
6114 void VmaJsonWriter::EndArray()
6116 VMA_ASSERT(!m_InsideString);
6121 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Writes a complete quoted string (body missing from this extraction;
// presumably BeginString + EndString - confirm against the original).
6125 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a quoted string value; optional pStr writes initial content.
6131 void VmaJsonWriter::BeginString(
const char* pStr)
6133 VMA_ASSERT(!m_InsideString);
6137 m_InsideString =
true;
6138 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6140 ContinueString(pStr);
// Appends characters to an open string. The per-character escaping switch
// (originals 6150-6181) is missing here; only the unsupported-character
// assert survived.
6144 void VmaJsonWriter::ContinueString(
const char* pStr)
6146 VMA_ASSERT(m_InsideString);
6148 const size_t strLen = strlen(pStr);
6149 for(
size_t i = 0; i < strLen; ++i)
6182 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric appends into an open string (AddNumber calls not visible here).
6188 void VmaJsonWriter::ContinueString(uint32_t n)
6190 VMA_ASSERT(m_InsideString);
6194 void VmaJsonWriter::ContinueString(uint64_t n)
6196 VMA_ASSERT(m_InsideString);
// Appends a formatted pointer into an open string.
6200 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6202 VMA_ASSERT(m_InsideString);
6203 m_SB.AddPointer(ptr);
// Finishes an open string, optionally appending trailing content first.
6206 void VmaJsonWriter::EndString(
const char* pStr)
6208 VMA_ASSERT(m_InsideString);
6209 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6211 ContinueString(pStr);
6214 m_InsideString =
false;
// Scalar writers; bodies (BeginValue + AddNumber) partially missing here.
6217 void VmaJsonWriter::WriteNumber(uint32_t n)
6219 VMA_ASSERT(!m_InsideString);
6224 void VmaJsonWriter::WriteNumber(uint64_t n)
6226 VMA_ASSERT(!m_InsideString);
// Emits the literal tokens "true"/"false" (runtime output text - do not alter).
6231 void VmaJsonWriter::WriteBool(
bool b)
6233 VMA_ASSERT(!m_InsideString);
6235 m_SB.Add(b ?
"true" :
"false");
6238 void VmaJsonWriter::WriteNull()
6240 VMA_ASSERT(!m_InsideString);
// Emits separators/indentation before a value. Inside an object, even
// valueCount means a key is expected (must be a string); odd means a value
// follows a key. Separator emission lines are missing from this extraction.
6245 void VmaJsonWriter::BeginValue(
bool isString)
6247 if(!m_Stack.empty())
6249 StackItem& currItem = m_Stack.back();
6250 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6251 currItem.valueCount % 2 == 0)
6253 VMA_ASSERT(isString);
6256 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6257 currItem.valueCount % 2 != 0)
6261 else if(currItem.valueCount > 0)
6270 ++currItem.valueCount;
// Emits newline + one INDENT per open non-single-line collection;
// oneLess drops one level (used when closing a collection).
6274 void VmaJsonWriter::WriteIndent(
bool oneLess)
6276 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6280 size_t count = m_Stack.size();
6281 if(count > 0 && oneLess)
6285 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's user data. When the allocation was created with the
// user-data-as-string flag, the incoming pointer is treated as a C string:
// the old copy is freed and a private heap copy (including '\0') is stored.
// Otherwise the raw pointer is stored as-is (no ownership taken).
6292 #endif // #if VMA_STATS_STRING_ENABLED 6296 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6298 if(IsUserDataString())
6300 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6302 FreeUserDataString(hAllocator);
6304 if(pUserData != VMA_NULL)
6306 const char*
const newStrSrc = (
char*)pUserData;
6307 const size_t newStrLen = strlen(newStrSrc);
6308 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6309 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6310 m_pUserData = newStrDst;
6315 m_pUserData = pUserData;
// Moves a block-type allocation to a (possibly different) block/offset,
// e.g. during defragmentation. If persistently mapped and the block changes,
// the mapping is migrated: unmap old block, map new one.
6319 void VmaAllocation_T::ChangeBlockAllocation(
6321 VmaDeviceMemoryBlock* block,
6322 VkDeviceSize offset)
6324 VMA_ASSERT(block != VMA_NULL);
6325 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6328 if(block != m_BlockAllocation.m_Block)
6330 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6331 if(IsPersistentMap())
6333 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6334 block->Map(hAllocator, mapRefCount, VMA_NULL);
6337 m_BlockAllocation.m_Block = block;
6338 m_BlockAllocation.m_Offset = offset;
// Records a new size for this allocation (the m_Size assignment is missing
// from this extraction - only the precondition assert survived).
6341 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
6343 VMA_ASSERT(newSize > 0);
// Offset within the device memory; dedicated allocations start at 0
// (the dedicated-case return is missing here).
6347 VkDeviceSize VmaAllocation_T::GetOffset()
const 6351 case ALLOCATION_TYPE_BLOCK:
6352 return m_BlockAllocation.m_Offset;
6353 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle for either allocation type.
6361 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6365 case ALLOCATION_TYPE_BLOCK:
6366 return m_BlockAllocation.m_Block->GetDeviceMemory();
6367 case ALLOCATION_TYPE_DEDICATED:
6368 return m_DedicatedAllocation.m_hMemory;
6371 return VK_NULL_HANDLE;
// Vulkan memory type index backing this allocation.
6375 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6379 case ALLOCATION_TYPE_BLOCK:
6380 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6381 case ALLOCATION_TYPE_DEDICATED:
6382 return m_DedicatedAllocation.m_MemoryTypeIndex;
// CPU pointer to the mapped data: block-mapped pointer + offset for block
// allocations, the stored pointer for dedicated ones.
6389 void* VmaAllocation_T::GetMappedData()
const 6393 case ALLOCATION_TYPE_BLOCK:
6396 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6397 VMA_ASSERT(pBlockData != VMA_NULL);
6398 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6405 case ALLOCATION_TYPE_DEDICATED:
6406 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6407 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can be lost; the dedicated-case return is missing
// from this extraction.
6414 bool VmaAllocation_T::CanBecomeLost()
const 6418 case ALLOCATION_TYPE_BLOCK:
6419 return m_BlockAllocation.m_CanBecomeLost;
6420 case ALLOCATION_TYPE_DEDICATED:
// Pool handle is only meaningful for block allocations.
6428 VmaPool VmaAllocation_T::GetPool()
const 6430 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6431 return m_BlockAllocation.m_hPool;
// Tries to mark this allocation lost via compare-exchange on the last-use
// frame index. Fails if already lost or still within the frames-in-use
// window. The surrounding retry loop is missing from this extraction.
6434 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6436 VMA_ASSERT(CanBecomeLost());
6442 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6445 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6450 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6456 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for suballocation types, indexed by the enum value
// (array contents missing from this extraction).
6466 #if VMA_STATS_STRING_ENABLED 6469 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Writes this allocation's key/value pairs into an already-open JSON object:
// type name, size, optional user data, creation/last-use frame indices and
// buffer/image usage flags when present.
6478 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6480 json.WriteString(
"Type");
6481 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6483 json.WriteString(
"Size");
6484 json.WriteNumber(m_Size);
6486 if(m_pUserData != VMA_NULL)
6488 json.WriteString(
"UserData");
6489 if(IsUserDataString())
6491 json.WriteString((
const char*)m_pUserData);
6496 json.ContinueString_Pointer(m_pUserData);
6501 json.WriteString(
"CreationFrameIndex");
6502 json.WriteNumber(m_CreationFrameIndex);
6504 json.WriteString(
"LastUseFrameIndex");
6505 json.WriteNumber(GetLastUseFrameIndex());
6507 if(m_BufferImageUsage != 0)
6509 json.WriteString(
"Usage");
6510 json.WriteNumber(m_BufferImageUsage);
// Frees the owned user-data string copy (allocated in SetUserData),
// including its '\0' terminator, and clears the pointer.
6516 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6518 VMA_ASSERT(IsUserDataString());
6519 if(m_pUserData != VMA_NULL)
6521 char*
const oldStr = (
char*)m_pUserData;
6522 const size_t oldStrLen = strlen(oldStr);
6523 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6524 m_pUserData = VMA_NULL;
// Increments the map reference count of a block allocation; the low 7 bits
// of m_MapCount hold the count (0x7F max), the top bit flags persistent
// mapping. The increment itself is missing from this extraction.
6528 void VmaAllocation_T::BlockAllocMap()
6530 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6532 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6538 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count of a block allocation.
6542 void VmaAllocation_T::BlockAllocUnmap()
6544 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6546 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6552 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, returns the cached pointer
// (refcount bump not visible here); otherwise calls vkMapMemory through the
// dispatch table and caches the result on success.
6556 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6558 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6562 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6564 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6565 *ppData = m_DedicatedAllocation.m_pMappedData;
6571 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6572 return VK_ERROR_MEMORY_MAP_FAILED;
6577 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6578 hAllocator->m_hDevice,
6579 m_DedicatedAllocation.m_hMemory,
6584 if(result == VK_SUCCESS)
6586 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation when its map refcount reaches zero,
// calling vkUnmapMemory and clearing the cached pointer.
6593 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6595 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6597 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6602 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6603 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6604 hAllocator->m_hDevice,
6605 m_DedicatedAllocation.m_hMemory);
6610 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo into JSON: counts, byte totals, and Min/Avg/Max
// sub-objects for allocation and unused-range sizes. The WriteNumber calls
// paired with each key are largely missing from this extraction.
6614 #if VMA_STATS_STRING_ENABLED 6616 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6620 json.WriteString(
"Blocks");
6623 json.WriteString(
"Allocations");
6626 json.WriteString(
"UnusedRanges");
6629 json.WriteString(
"UsedBytes");
6632 json.WriteString(
"UnusedBytes");
6637 json.WriteString(
"AllocationSize");
6638 json.BeginObject(
true);
6639 json.WriteString(
"Min");
6641 json.WriteString(
"Avg");
6643 json.WriteString(
"Max");
6650 json.WriteString(
"UnusedRangeSize");
6651 json.BeginObject(
true);
6652 json.WriteString(
"Min");
6654 json.WriteString(
"Avg");
6656 json.WriteString(
"Max");
// Comparator functor ordering free-suballocation list iterators by ascending
// suballocation size; the second overload supports heterogeneous lookup
// against a raw VkDeviceSize (used by binary search).
6664 #endif // #if VMA_STATS_STRING_ENABLED 6666 struct VmaSuballocationItemSizeLess
6669 const VmaSuballocationList::iterator lhs,
6670 const VmaSuballocationList::iterator rhs)
const 6672 return lhs->size < rhs->size;
6675 const VmaSuballocationList::iterator lhs,
6676 VkDeviceSize rhsSize)
const 6678 return lhs->size < rhsSize;
// Base metadata constructor: captures the allocator's callbacks for later
// internal allocations.
6686 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6688 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// Opens the per-block JSON map: totals, counts, then a "Suballocations"
// array that the caller fills via PrintDetailedMap_Allocation/_UnusedRange.
6692 #if VMA_STATS_STRING_ENABLED 6694 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6695 VkDeviceSize unusedBytes,
6696 size_t allocationCount,
6697 size_t unusedRangeCount)
const 6701 json.WriteString(
"TotalBytes");
6702 json.WriteNumber(GetSize());
6704 json.WriteString(
"UnusedBytes");
6705 json.WriteNumber(unusedBytes);
6707 json.WriteString(
"Allocations");
6708 json.WriteNumber((uint64_t)allocationCount);
6710 json.WriteString(
"UnusedRanges");
6711 json.WriteNumber((uint64_t)unusedRangeCount);
6713 json.WriteString(
"Suballocations");
// Writes one used suballocation as a single-line object: its offset plus
// the allocation's own parameters.
6717 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6718 VkDeviceSize offset,
6721 json.BeginObject(
true);
6723 json.WriteString(
"Offset");
6724 json.WriteNumber(offset);
6726 hAllocation->PrintParameters(json);
// Writes one free range as a single-line object: offset, FREE type, size.
6731 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6732 VkDeviceSize offset,
6733 VkDeviceSize size)
const 6735 json.BeginObject(
true);
6737 json.WriteString(
"Offset");
6738 json.WriteNumber(offset);
6740 json.WriteString(
"Type");
6741 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6743 json.WriteString(
"Size");
6744 json.WriteNumber(size);
// Closes the array/object opened by PrintDetailedMap_Begin (body missing
// from this extraction).
6749 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// Generic (free-list) metadata constructor: both containers allocate via
// the allocator's callbacks. Some initializers are missing here.
const 6755 #endif // #if VMA_STATS_STRING_ENABLED 6760 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6761 VmaBlockMetadata(hAllocator),
6764 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6765 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6769 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes the block as one single free suballocation spanning the whole
// size, registered in the by-size vector.
6773 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6775 VmaBlockMetadata::Init(size);
6778 m_SumFreeSize = size;
6780 VmaSuballocation suballoc = {};
6781 suballoc.offset = 0;
6782 suballoc.size = size;
6783 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6784 suballoc.hAllocation = VK_NULL_HANDLE;
6786 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6787 m_Suballocations.push_back(suballoc);
6788 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6790 m_FreeSuballocationsBySize.push_back(suballocItem);
// Consistency check (debug): walks the suballocation list verifying
// contiguous offsets, no two adjacent free ranges, free<->null-handle
// agreement, and recomputes free count/size/registered count to compare
// against the cached members. Also verifies the by-size vector is sorted
// ascending and contains only FREE entries.
6793 bool VmaBlockMetadata_Generic::Validate()
const 6795 VMA_VALIDATE(!m_Suballocations.empty());
6798 VkDeviceSize calculatedOffset = 0;
6800 uint32_t calculatedFreeCount = 0;
6802 VkDeviceSize calculatedSumFreeSize = 0;
6805 size_t freeSuballocationsToRegister = 0;
6807 bool prevFree =
false;
6809 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6810 suballocItem != m_Suballocations.cend();
6813 const VmaSuballocation& subAlloc = *suballocItem;
6816 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6818 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged.
6820 VMA_VALIDATE(!prevFree || !currFree);
6822 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6826 calculatedSumFreeSize += subAlloc.size;
6827 ++calculatedFreeCount;
6828 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6830 ++freeSuballocationsToRegister;
6834 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6838 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6839 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin every used range must be preceded by a free one.
6842 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6845 calculatedOffset += subAlloc.size;
6846 prevFree = currFree;
6851 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6853 VkDeviceSize lastSize = 0;
6854 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6856 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6859 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6861 VMA_VALIDATE(suballocItem->size >= lastSize);
6863 lastSize = suballocItem->size;
6867 VMA_VALIDATE(ValidateFreeSuballocationList());
6868 VMA_VALIDATE(calculatedOffset == GetSize());
6869 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6870 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: the by-size vector is sorted ascending, so its last
// registered entry is the maximum (fallback return missing from extraction).
6875 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6877 if(!m_FreeSuballocationsBySize.empty())
6879 return m_FreeSuballocationsBySize.back()->size;
// Empty means exactly one suballocation and it is free.
6887 bool VmaBlockMetadata_Generic::IsEmpty()
const 6889 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Accumulates per-block statistics; per-range accounting lines are missing
// from this extraction.
6892 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6896 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6908 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6909 suballocItem != m_Suballocations.cend();
6912 const VmaSuballocation& suballoc = *suballocItem;
6913 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into aggregate pool statistics.
6926 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6928 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6930 inoutStats.
size += GetSize();
// JSON dump: header via PrintDetailedMap_Begin, then one entry per
// suballocation (free ranges and allocations), then the footer.
6937 #if VMA_STATS_STRING_ENABLED 6939 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6941 PrintDetailedMap_Begin(json,
6943 m_Suballocations.size() - (size_t)m_FreeCount,
6947 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6948 suballocItem != m_Suballocations.cend();
6949 ++suballocItem, ++i)
6951 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6953 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6957 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6961 PrintDetailedMap_End(json);
// Finds space for a new allocation of allocSize/allocAlignment.
// Strategy: binary-search the sorted free list for the first range that can
// hold size + 2*margin, then probe forward (a best-fit variant also probes
// backward over the array). If nothing fits and canMakeOtherLost is set,
// scans every suballocation considering lost-able allocations, keeping the
// cheapest candidate by CalcCost(). Returns true (returns themselves are
// missing from this extraction) when *pAllocationRequest is filled.
6964 #endif // #if VMA_STATS_STRING_ENABLED 6966 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6967 uint32_t currentFrameIndex,
6968 uint32_t frameInUseCount,
6969 VkDeviceSize bufferImageGranularity,
6970 VkDeviceSize allocSize,
6971 VkDeviceSize allocAlignment,
6973 VmaSuballocationType allocType,
6974 bool canMakeOtherLost,
6976 VmaAllocationRequest* pAllocationRequest)
6978 VMA_ASSERT(allocSize > 0);
6979 VMA_ASSERT(!upperAddress);
6980 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6981 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6982 VMA_HEAVY_ASSERT(Validate());
// Early out: total free space cannot possibly fit the request.
6985 if(canMakeOtherLost ==
false &&
6986 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6992 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6993 if(freeSuballocCount > 0)
// First free range large enough to hold size plus both debug margins.
6998 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6999 m_FreeSuballocationsBySize.data(),
7000 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7001 allocSize + 2 * VMA_DEBUG_MARGIN,
7002 VmaSuballocationItemSizeLess());
7003 size_t index = it - m_FreeSuballocationsBySize.data();
7004 for(; index < freeSuballocCount; ++index)
7009 bufferImageGranularity,
7013 m_FreeSuballocationsBySize[index],
7015 &pAllocationRequest->offset,
7016 &pAllocationRequest->itemsToMakeLostCount,
7017 &pAllocationRequest->sumFreeSize,
7018 &pAllocationRequest->sumItemSize))
7020 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate strategy: iterate from largest free range downward.
7028 for(
size_t index = freeSuballocCount; index--; )
7033 bufferImageGranularity,
7037 m_FreeSuballocationsBySize[index],
7039 &pAllocationRequest->offset,
7040 &pAllocationRequest->itemsToMakeLostCount,
7041 &pAllocationRequest->sumFreeSize,
7042 &pAllocationRequest->sumItemSize))
7044 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Fallback: consider making other allocations lost; keep cheapest option.
7051 if(canMakeOtherLost)
7055 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7056 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7058 VmaAllocationRequest tmpAllocRequest = {};
7059 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7060 suballocIt != m_Suballocations.end();
7063 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7064 suballocIt->hAllocation->CanBecomeLost())
7069 bufferImageGranularity,
7075 &tmpAllocRequest.offset,
7076 &tmpAllocRequest.itemsToMakeLostCount,
7077 &tmpAllocRequest.sumFreeSize,
7078 &tmpAllocRequest.sumItemSize))
7080 tmpAllocRequest.item = suballocIt;
7082 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7085 *pAllocationRequest = tmpAllocRequest;
7091 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the lost-allocation plan recorded in pAllocationRequest: walks
// forward from request->item, marking lost-able allocations lost and
// merging them into free space via FreeSuballocation, until the planned
// count reaches zero. A MakeLost failure path (between 7114 and 7125) is
// missing from this extraction.
7100 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7101 uint32_t currentFrameIndex,
7102 uint32_t frameInUseCount,
7103 VmaAllocationRequest* pAllocationRequest)
7105 while(pAllocationRequest->itemsToMakeLostCount > 0)
7107 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7109 ++pAllocationRequest->item;
7111 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7112 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7113 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7114 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the merged iterator.
7116 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7117 --pAllocationRequest->itemsToMakeLostCount;
7125 VMA_HEAVY_ASSERT(Validate());
7126 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7127 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks every lost-able allocation in this block as lost (subject to the
// frames-in-use window) and frees its range; returns how many were lost.
7132 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7134 uint32_t lostAllocationCount = 0;
7135 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7136 it != m_Suballocations.end();
7139 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7140 it->hAllocation->CanBecomeLost() &&
7141 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7143 it = FreeSuballocation(it);
7144 ++lostAllocationCount;
7147 return lostAllocationCount;
// Corruption detection: verifies the magic value written in the debug
// margin before and after every used suballocation in the mapped block
// data. Returns VK_ERROR_VALIDATION_FAILED_EXT on the first mismatch
// (success return missing from this extraction).
7150 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7152 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7153 it != m_Suballocations.end();
7156 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7158 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7160 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7161 return VK_ERROR_VALIDATION_FAILED_EXT;
7163 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7165 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7166 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the chosen
// free suballocation to a used one at request.offset, and splits off any
// leftover space before/after it as new free suballocations, updating the
// free-count and free-size bookkeeping.
7174 void VmaBlockMetadata_Generic::Alloc(
7175 const VmaAllocationRequest& request,
7176 VmaSuballocationType type,
7177 VkDeviceSize allocSize,
7181 VMA_ASSERT(!upperAddress);
7182 VMA_ASSERT(request.item != m_Suballocations.end());
7183 VmaSuballocation& suballoc = *request.item;
7185 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7187 VMA_ASSERT(request.offset >= suballoc.offset);
7188 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7189 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7190 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item stops being free: remove from the by-size registry, then
// shrink/retag it in place as the used range.
7194 UnregisterFreeSuballocation(request.item);
7196 suballoc.offset = request.offset;
7197 suballoc.size = allocSize;
7198 suballoc.type = type;
7199 suballoc.hAllocation = hAllocation;
// Trailing free remainder (guard `if(paddingEnd)` missing from extraction).
7204 VmaSuballocation paddingSuballoc = {};
7205 paddingSuballoc.offset = request.offset + allocSize;
7206 paddingSuballoc.size = paddingEnd;
7207 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7208 VmaSuballocationList::iterator next = request.item;
7210 const VmaSuballocationList::iterator paddingEndItem =
7211 m_Suballocations.insert(next, paddingSuballoc);
7212 RegisterFreeSuballocation(paddingEndItem);
// Leading free remainder, inserted before the used item.
7218 VmaSuballocation paddingSuballoc = {};
7219 paddingSuballoc.offset = request.offset - paddingBegin;
7220 paddingSuballoc.size = paddingBegin;
7221 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7222 const VmaSuballocationList::iterator paddingBeginItem =
7223 m_Suballocations.insert(request.item, paddingSuballoc);
7224 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; increments for created paddings follow (the
// paddingEnd increment lines are missing from this extraction).
7228 m_FreeCount = m_FreeCount - 1;
7229 if(paddingBegin > 0)
7237 m_SumFreeSize -= allocSize;
// Frees the range owned by `allocation`: linear search for the matching
// suballocation, then FreeSuballocation (which merges neighbors). Asserts
// if the allocation is not found in this block.
7240 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7242 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7243 suballocItem != m_Suballocations.end();
7246 VmaSuballocation& suballoc = *suballocItem;
7247 if(suballoc.hAllocation == allocation)
7249 FreeSuballocation(suballocItem);
7250 VMA_HEAVY_ASSERT(Validate());
7254 VMA_ASSERT(0 &&
"Not found!");
// Like Free(), but identifies the suballocation by its byte offset instead
// of the allocation handle.
7257 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7259 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7260 suballocItem != m_Suballocations.end();
7263 VmaSuballocation& suballoc = *suballocItem;
7264 if(suballoc.offset == offset)
7266 FreeSuballocation(suballocItem);
7270 VMA_ASSERT(0 &&
"Not found!");
// Attempts to grow or shrink `alloc` in place.
// Shrinking: returns the freed tail to the following free suballocation
// (growing it leftward) or inserts a new free suballocation.
// Growing: only succeeds when the next suballocation is free and large
// enough to donate `sizeDiff` bytes (it is shrunk or erased accordingly).
// Returns true on success; several returns/else branches are missing from
// this extraction.
7273 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
7275 typedef VmaSuballocationList::iterator iter_type;
7276 for(iter_type suballocItem = m_Suballocations.begin();
7277 suballocItem != m_Suballocations.end();
7280 VmaSuballocation& suballoc = *suballocItem;
7281 if(suballoc.hAllocation == alloc)
7283 iter_type nextItem = suballocItem;
7287 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// ---- Shrink path ----
7290 if(newSize < alloc->GetSize())
7292 const VkDeviceSize sizeDiff = suballoc.size - newSize;
7295 if(nextItem != m_Suballocations.end())
// Next range is free: extend it leftward by sizeDiff.
7298 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7301 UnregisterFreeSuballocation(nextItem);
7302 nextItem->offset -= sizeDiff;
7303 nextItem->size += sizeDiff;
7304 RegisterFreeSuballocation(nextItem);
// Next range is used: insert a brand-new free range after this one.
7310 VmaSuballocation newFreeSuballoc;
7311 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7312 newFreeSuballoc.offset = suballoc.offset + newSize;
7313 newFreeSuballoc.size = sizeDiff;
7314 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7315 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
7316 RegisterFreeSuballocation(newFreeSuballocIt);
// This allocation is the last one: append the free range at the end.
7325 VmaSuballocation newFreeSuballoc;
7326 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7327 newFreeSuballoc.offset = suballoc.offset + newSize;
7328 newFreeSuballoc.size = sizeDiff;
7329 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7330 m_Suballocations.push_back(newFreeSuballoc);
7332 iter_type newFreeSuballocIt = m_Suballocations.end();
7333 RegisterFreeSuballocation(--newFreeSuballocIt);
7338 suballoc.size = newSize;
7339 m_SumFreeSize += sizeDiff;
// ---- Grow path ----
7344 const VkDeviceSize sizeDiff = newSize - suballoc.size;
7347 if(nextItem != m_Suballocations.end())
7350 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Not enough free space right after this allocation: fail.
7353 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Donor free range keeps a remainder: shift and shrink it.
7359 if(nextItem->size > sizeDiff)
7362 UnregisterFreeSuballocation(nextItem);
7363 nextItem->offset += sizeDiff;
7364 nextItem->size -= sizeDiff;
7365 RegisterFreeSuballocation(nextItem);
// Donor free range fully consumed: remove it.
7371 UnregisterFreeSuballocation(nextItem);
7372 m_Suballocations.erase(nextItem);
7388 suballoc.size = newSize;
7389 m_SumFreeSize -= sizeDiff;
7396 VMA_ASSERT(0 &&
"Not found!");
// Debug check of the by-size registry: every entry must be a FREE
// suballocation of at least the registration threshold, and the vector must
// be sorted by ascending size.
7400 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7402 VkDeviceSize lastSize = 0;
7403 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7405 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7407 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7408 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7409 VMA_VALIDATE(it->size >= lastSize);
7410 lastSize = it->size;
// Tests whether a request of allocSize/allocAlignment/allocType can be
// placed starting at `suballocItem`. Computes the final *pOffset (after
// debug margin, alignment, and buffer/image-granularity adjustments) and,
// in canMakeOtherLost mode, how many later allocations would need to be
// made lost plus the free/lost byte sums. Two parallel code paths: the
// first handles canMakeOtherLost (may span multiple suballocations), the
// second requires a single free suballocation. Many early `return false`
// lines are missing from this extraction.
7415 bool VmaBlockMetadata_Generic::CheckAllocation(
7416 uint32_t currentFrameIndex,
7417 uint32_t frameInUseCount,
7418 VkDeviceSize bufferImageGranularity,
7419 VkDeviceSize allocSize,
7420 VkDeviceSize allocAlignment,
7421 VmaSuballocationType allocType,
7422 VmaSuballocationList::const_iterator suballocItem,
7423 bool canMakeOtherLost,
7424 VkDeviceSize* pOffset,
7425 size_t* itemsToMakeLostCount,
7426 VkDeviceSize* pSumFreeSize,
7427 VkDeviceSize* pSumItemSize)
const 7429 VMA_ASSERT(allocSize > 0);
7430 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7431 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7432 VMA_ASSERT(pOffset != VMA_NULL);
7434 *itemsToMakeLostCount = 0;
// ---- Path 1: allowed to make other allocations lost ----
7438 if(canMakeOtherLost)
7440 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7442 *pSumFreeSize = suballocItem->size;
// Starting item is used: only viable if it can be made lost now.
7446 if(suballocItem->hAllocation->CanBecomeLost() &&
7447 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7449 ++*itemsToMakeLostCount;
7450 *pSumItemSize = suballocItem->size;
// Remaining block space cannot hold the allocation at all.
7459 if(GetSize() - suballocItem->offset < allocSize)
7465 *pOffset = suballocItem->offset;
// Apply leading debug margin, then alignment.
7468 if(VMA_DEBUG_MARGIN > 0)
7470 *pOffset += VMA_DEBUG_MARGIN;
7474 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against preceding suballocations of a
// conflicting type (linear vs optimal resources on the same page).
7478 if(bufferImageGranularity > 1)
7480 bool bufferImageGranularityConflict =
false;
7481 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7482 while(prevSuballocItem != m_Suballocations.cbegin())
7485 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7486 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7488 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7490 bufferImageGranularityConflict =
true;
7498 if(bufferImageGranularityConflict)
7500 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
7506 if(*pOffset >= suballocItem->offset + suballocItem->size)
7512 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7515 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7517 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Would run past the end of the block.
7519 if(suballocItem->offset + totalSize > GetSize())
// Walk following suballocations, accumulating free space and lost-able
// allocations until the request fits.
7526 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7527 if(totalSize > suballocItem->size)
7529 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7530 while(remainingSize > 0)
7533 if(lastSuballocItem == m_Suballocations.cend())
7537 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7539 *pSumFreeSize += lastSuballocItem->size;
7543 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7544 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7545 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7547 ++*itemsToMakeLostCount;
7548 *pSumItemSize += lastSuballocItem->size;
7555 remainingSize = (lastSuballocItem->size < remainingSize) ?
7556 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations; conflicting
// neighbors must also be lost-able.
7562 if(bufferImageGranularity > 1)
7564 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7566 while(nextSuballocItem != m_Suballocations.cend())
7568 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7569 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7571 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7573 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7574 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7575 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7577 ++*itemsToMakeLostCount;
// ---- Path 2: must fit inside this single free suballocation ----
7596 const VmaSuballocation& suballoc = *suballocItem;
7597 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7599 *pSumFreeSize = suballoc.size;
7602 if(suballoc.size < allocSize)
7608 *pOffset = suballoc.offset;
7611 if(VMA_DEBUG_MARGIN > 0)
7613 *pOffset += VMA_DEBUG_MARGIN;
7617 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity scan as in path 1.
7621 if(bufferImageGranularity > 1)
7623 bool bufferImageGranularityConflict =
false;
7624 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7625 while(prevSuballocItem != m_Suballocations.cbegin())
7628 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7629 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7631 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7633 bufferImageGranularityConflict =
true;
7641 if(bufferImageGranularityConflict)
7643 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7648 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7651 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if padded request exceeds this free range.
7654 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: any conflicting neighbor fails the check here
// (no lost option on this path).
7661 if(bufferImageGranularity > 1)
7663 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7665 while(nextSuballocItem != m_Suballocations.cend())
7667 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7668 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7670 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Coalesces a free suballocation with its (also free) successor: the item
// absorbs the next item's size and the successor is erased. Callers must
// unregister the successor from the by-size vector first.
7689 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7691 VMA_ASSERT(item != m_Suballocations.end());
7692 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7694 VmaSuballocationList::iterator nextItem = item;
7696 VMA_ASSERT(nextItem != m_Suballocations.end());
7697 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7699 item->size += nextItem->size;
7701 m_Suballocations.erase(nextItem);
// Converts a used suballocation to free, updates the free-size total, then
// coalesces with free neighbors on either side (unregistering them from the
// by-size vector before merging). Returns the iterator of the resulting
// free suballocation, registered in the by-size vector.
7704 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7707 VmaSuballocation& suballoc = *suballocItem;
7708 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7709 suballoc.hAllocation = VK_NULL_HANDLE;
7713 m_SumFreeSize += suballoc.size;
7716 bool mergeWithNext =
false;
7717 bool mergeWithPrev =
false;
7719 VmaSuballocationList::iterator nextItem = suballocItem;
7721 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7723 mergeWithNext =
true;
7726 VmaSuballocationList::iterator prevItem = suballocItem;
7727 if(suballocItem != m_Suballocations.begin())
7730 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7732 mergeWithPrev =
true;
7738 UnregisterFreeSuballocation(nextItem);
7739 MergeFreeWithNext(suballocItem);
// When merging backward, the previous item survives and becomes the result.
7744 UnregisterFreeSuballocation(prevItem);
7745 MergeFreeWithNext(prevItem);
7746 RegisterFreeSuballocation(prevItem);
7751 RegisterFreeSuballocation(suballocItem);
7752 return suballocItem;
// Inserts a free suballocation iterator into the by-size vector, keeping it
// sorted. Ranges below VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are not
// tracked (they are too small to be worth searching).
7756 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7758 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7759 VMA_ASSERT(item->size > 0);
7763 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7765 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7767 if(m_FreeSuballocationsBySize.empty())
7769 m_FreeSuballocationsBySize.push_back(item);
7773 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation iterator from the by-size vector. Binary
// search narrows to the first entry of equal size, then a linear scan over
// same-size entries finds the exact iterator. Asserts if absent.
7781 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7783 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7784 VMA_ASSERT(item->size > 0);
7788 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7790 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7792 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7793 m_FreeSuballocationsBySize.data(),
7794 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7796 VmaSuballocationItemSizeLess());
7797 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7798 index < m_FreeSuballocationsBySize.size();
7801 if(m_FreeSuballocationsBySize[index] == item)
7803 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Past the run of equal sizes without a match: registry is corrupt.
7806 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7808 VMA_ASSERT(0 &&
"Not found.");
// Constructor: starts with both suballocation vectors empty, the 1st vector
// being m_Suballocations0, and the 2nd vector unused (SECOND_VECTOR_EMPTY).
// All null-item (freed-but-not-compacted) counters start at zero.
7817 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7818 VmaBlockMetadata(hAllocator),
// Both vectors use the allocator's CPU allocation callbacks.
7820 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7821 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7822 m_1stVectorIndex(0),
7823 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7824 m_1stNullItemsBeginCount(0),
7825 m_1stNullItemsMiddleCount(0),
7826 m_2ndNullItemsCount(0)
// Destructor: nothing to release explicitly; vectors clean up via their
// VmaStlAllocator.
7830 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of `size` bytes: the whole block starts
// as free space.
7834 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7836 VmaBlockMetadata::Init(size);
7837 m_SumFreeSize = size;
// Debug validation of the linear allocator's invariants. Walks both
// suballocation vectors in address order, checking offsets are monotonically
// increasing (with VMA_DEBUG_MARGIN gaps), allocation handles agree with
// recorded offset/size, null-item counters match, and m_SumFreeSize is
// consistent with the sum of used sizes. Returns false (via VMA_VALIDATE)
// on the first violated invariant.
// NOTE(review): interior lines are elided in this extraction; null-item
// counting statements inside the loops are not visible here.
7840 bool VmaBlockMetadata_Linear::Validate()
const 7842 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7843 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// The 2nd vector is non-empty exactly when a 2nd-vector mode is active.
7845 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7846 VMA_VALIDATE(!suballocations1st.empty() ||
7847 suballocations2nd.empty() ||
7848 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7850 if(!suballocations1st.empty())
// First non-null item in 1st vector must hold a live allocation; the
// last item must as well (trailing nulls are popped eagerly).
7853 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7855 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7857 if(!suballocations2nd.empty())
7860 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters may never exceed vector sizes.
7863 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7864 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7866 VkDeviceSize sumUsedSize = 0;
7867 const size_t suballoc1stCount = suballocations1st.size();
7868 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the low end of the block, so
// validate it first in ascending address order.
7870 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7872 const size_t suballoc2ndCount = suballocations2nd.size();
7873 size_t nullItem2ndCount = 0;
7874 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7876 const VmaSuballocation& suballoc = suballocations2nd[i];
7877 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free items and null handles must coincide.
7879 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7880 VMA_VALIDATE(suballoc.offset >= offset);
7884 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7885 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7886 sumUsedSize += suballoc.size;
7893 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7896 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be free with no handle.
7899 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7901 const VmaSuballocation& suballoc = suballocations1st[i];
7902 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7903 suballoc.hAllocation == VK_NULL_HANDLE);
7906 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Validate the remainder of the 1st vector.
7908 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7910 const VmaSuballocation& suballoc = suballocations1st[i];
7911 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7913 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7914 VMA_VALIDATE(suballoc.offset >= offset);
7915 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7919 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7920 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7921 sumUsedSize += suballoc.size;
7928 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7930 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the top of the block,
// so iterate it in reverse to continue the ascending-address walk.
7932 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7934 const size_t suballoc2ndCount = suballocations2nd.size();
7935 size_t nullItem2ndCount = 0;
7936 for(
size_t i = suballoc2ndCount; i--; )
7938 const VmaSuballocation& suballoc = suballocations2nd[i];
7939 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7941 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7942 VMA_VALIDATE(suballoc.offset >= offset);
7946 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7947 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7948 sumUsedSize += suballoc.size;
7955 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7958 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final global invariants: everything fits in the block, and the cached
// free-size total matches what the walk computed.
7961 VMA_VALIDATE(offset <= GetSize());
7962 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total items in both vectors minus the
// freed-but-not-yet-compacted (null) items.
7967 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7969 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7970 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free region, which for the
// linear algorithm depends only on the 2nd-vector mode:
// - EMPTY: max of the gap before the first 1st-vector item and the gap
//   after its last item;
// - RING_BUFFER: the gap between the end of the 2nd vector (low addresses)
//   and the start of the 1st vector;
// - DOUBLE_STACK: the gap between the top of the 1st stack and the bottom
//   of the 2nd (downward-growing) stack.
// NOTE(review): early-out branches (e.g. for an empty block) are elided in
// this extraction.
7973 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7975 const VkDeviceSize size = GetSize();
7987 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7989 switch(m_2ndVectorMode)
7991 case SECOND_VECTOR_EMPTY:
7997 const size_t suballocations1stCount = suballocations1st.size();
7998 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7999 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8000 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Space before the first allocation vs. space after the last one.
8002 firstSuballoc.offset,
8003 size - (lastSuballoc.offset + lastSuballoc.size));
8007 case SECOND_VECTOR_RING_BUFFER:
8012 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8013 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8014 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8015 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8019 case SECOND_VECTOR_DOUBLE_STACK:
8024 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// back() of the 2nd vector is its lowest-address (top-of-stack) item.
8025 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8026 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8027 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates per-block statistics into `outInfo` by walking all
// allocations and gaps in ascending address order: first the 2nd vector in
// ring-buffer mode (low addresses), then the 1st vector, then the 2nd
// vector in double-stack mode (high addresses, reverse iteration).
// NOTE(review): the statements that actually write into outInfo are elided
// in this extraction; only the traversal skeleton is visible.
8037 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8039 const VkDeviceSize size = GetSize();
8040 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8041 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8042 const size_t suballoc1stCount = suballocations1st.size();
8043 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the previously visited region.
8054 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer 2nd vector, from offset 0 up to the start of the
// 1st vector's first live item.
8056 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8058 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8059 size_t nextAlloc2ndIndex = 0;
8060 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
8063 while(nextAlloc2ndIndex < suballoc2ndCount &&
8064 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8066 ++nextAlloc2ndIndex;
8070 if(nextAlloc2ndIndex < suballoc2ndCount)
8072 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8075 if(lastOffset < suballoc.offset)
8078 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8092 lastOffset = suballoc.offset + suballoc.size;
8093 ++nextAlloc2ndIndex;
// No more allocations: remaining space up to the boundary is unused.
8099 if(lastOffset < freeSpace2ndTo1stEnd)
8101 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8109 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to either the bottom of the 2nd stack
// (double-stack mode) or the end of the block.
8114 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8115 const VkDeviceSize freeSpace1stTo2ndEnd =
8116 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8117 while(lastOffset < freeSpace1stTo2ndEnd)
8120 while(nextAlloc1stIndex < suballoc1stCount &&
8121 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8123 ++nextAlloc1stIndex;
8127 if(nextAlloc1stIndex < suballoc1stCount)
8129 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8132 if(lastOffset < suballoc.offset)
8135 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8149 lastOffset = suballoc.offset + suballoc.size;
8150 ++nextAlloc1stIndex;
8156 if(lastOffset < freeSpace1stTo2ndEnd)
8158 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8166 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack 2nd vector, iterated in reverse so addresses ascend.
8170 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8172 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8173 while(lastOffset < size)
// SIZE_MAX is the sentinel for "index walked past the front".
8176 while(nextAlloc2ndIndex != SIZE_MAX &&
8177 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8179 --nextAlloc2ndIndex;
8183 if(nextAlloc2ndIndex != SIZE_MAX)
8185 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8188 if(lastOffset < suballoc.offset)
8191 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8205 lastOffset = suballoc.offset + suballoc.size;
8206 --nextAlloc2ndIndex;
8212 if(lastOffset < size)
8214 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's usage to pool-wide statistics in `inoutStats`, using the
// same three-pass ascending-address walk as CalcAllocationStatInfo:
// ring-buffer 2nd vector, then 1st vector, then double-stack 2nd vector.
// NOTE(review): the statements that update inoutStats inside the loops are
// elided in this extraction; only the traversal skeleton is visible.
8230 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8232 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8233 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8234 const VkDeviceSize size = GetSize();
8235 const size_t suballoc1stCount = suballocations1st.size();
8236 const size_t suballoc2ndCount = suballocations2nd.size();
// Whole block size contributes to the pool's total.
8238 inoutStats.
size += size;
8240 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer 2nd vector (low addresses).
8242 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8244 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): starting at m_1stNullItemsBeginCount here differs from the
// `0` used in the analogous loops of CalcAllocationStatInfo/PrintDetailedMap;
// looks suspicious — verify against upstream VMA.
8245 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8246 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
8249 while(nextAlloc2ndIndex < suballoc2ndCount &&
8250 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8252 ++nextAlloc2ndIndex;
8256 if(nextAlloc2ndIndex < suballoc2ndCount)
8258 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8261 if(lastOffset < suballoc.offset)
8264 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8275 lastOffset = suballoc.offset + suballoc.size;
8276 ++nextAlloc2ndIndex;
8281 if(lastOffset < freeSpace2ndTo1stEnd)
8284 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8291 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, bounded by the bottom of the 2nd stack or block end.
8296 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8297 const VkDeviceSize freeSpace1stTo2ndEnd =
8298 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8299 while(lastOffset < freeSpace1stTo2ndEnd)
8302 while(nextAlloc1stIndex < suballoc1stCount &&
8303 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8305 ++nextAlloc1stIndex;
8309 if(nextAlloc1stIndex < suballoc1stCount)
8311 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8314 if(lastOffset < suballoc.offset)
8317 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8328 lastOffset = suballoc.offset + suballoc.size;
8329 ++nextAlloc1stIndex;
8334 if(lastOffset < freeSpace1stTo2ndEnd)
8337 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8344 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack 2nd vector, reverse iteration, SIZE_MAX sentinel.
8348 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8350 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8351 while(lastOffset < size)
8354 while(nextAlloc2ndIndex != SIZE_MAX &&
8355 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8357 --nextAlloc2ndIndex;
8361 if(nextAlloc2ndIndex != SIZE_MAX)
8363 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8366 if(lastOffset < suballoc.offset)
8369 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8380 lastOffset = suballoc.offset + suballoc.size;
8381 --nextAlloc2ndIndex;
8386 if(lastOffset < size)
8389 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this block. Two phases over the same
// three-pass ascending-address walk: phase 1 only counts allocations,
// unused ranges and used bytes (needed up-front by PrintDetailedMap_Begin);
// phase 2 repeats the walk emitting one JSON entry per allocation/gap.
// NOTE(review): counter-increment statements and the lastOffset reset
// between phases are elided in this extraction.
8402 #if VMA_STATS_STRING_ENABLED 8403 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8405 const VkDeviceSize size = GetSize();
8406 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8407 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8408 const size_t suballoc1stCount = suballocations1st.size();
8409 const size_t suballoc2ndCount = suballocations2nd.size();
// Phase 1: count everything first.
8413 size_t unusedRangeCount = 0;
8414 VkDeviceSize usedBytes = 0;
8416 VkDeviceSize lastOffset = 0;
8418 size_t alloc2ndCount = 0;
// Count pass over ring-buffer 2nd vector (low addresses).
8419 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8421 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8422 size_t nextAlloc2ndIndex = 0;
8423 while(lastOffset < freeSpace2ndTo1stEnd)
8426 while(nextAlloc2ndIndex < suballoc2ndCount &&
8427 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8429 ++nextAlloc2ndIndex;
8433 if(nextAlloc2ndIndex < suballoc2ndCount)
8435 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8438 if(lastOffset < suballoc.offset)
8447 usedBytes += suballoc.size;
8450 lastOffset = suballoc.offset + suballoc.size;
8451 ++nextAlloc2ndIndex;
8456 if(lastOffset < freeSpace2ndTo1stEnd)
8463 lastOffset = freeSpace2ndTo1stEnd;
// Count pass over the 1st vector.
8468 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8469 size_t alloc1stCount = 0;
8470 const VkDeviceSize freeSpace1stTo2ndEnd =
8471 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8472 while(lastOffset < freeSpace1stTo2ndEnd)
8475 while(nextAlloc1stIndex < suballoc1stCount &&
8476 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8478 ++nextAlloc1stIndex;
8482 if(nextAlloc1stIndex < suballoc1stCount)
8484 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8487 if(lastOffset < suballoc.offset)
8496 usedBytes += suballoc.size;
8499 lastOffset = suballoc.offset + suballoc.size;
8500 ++nextAlloc1stIndex;
8505 if(lastOffset < size)
8512 lastOffset = freeSpace1stTo2ndEnd;
// Count pass over double-stack 2nd vector (reverse iteration).
8516 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8518 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8519 while(lastOffset < size)
8522 while(nextAlloc2ndIndex != SIZE_MAX &&
8523 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8525 --nextAlloc2ndIndex;
8529 if(nextAlloc2ndIndex != SIZE_MAX)
8531 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8534 if(lastOffset < suballoc.offset)
8543 usedBytes += suballoc.size;
8546 lastOffset = suballoc.offset + suballoc.size;
8547 --nextAlloc2ndIndex;
8552 if(lastOffset < size)
// Phase 2: emit JSON. Begin with totals computed above.
8564 const VkDeviceSize unusedBytes = size - usedBytes;
8565 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// Emit pass over ring-buffer 2nd vector.
8570 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8572 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8573 size_t nextAlloc2ndIndex = 0;
8574 while(lastOffset < freeSpace2ndTo1stEnd)
8577 while(nextAlloc2ndIndex < suballoc2ndCount &&
8578 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8580 ++nextAlloc2ndIndex;
8584 if(nextAlloc2ndIndex < suballoc2ndCount)
8586 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8589 if(lastOffset < suballoc.offset)
8592 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8593 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8598 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8601 lastOffset = suballoc.offset + suballoc.size;
8602 ++nextAlloc2ndIndex;
8607 if(lastOffset < freeSpace2ndTo1stEnd)
8610 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8611 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8615 lastOffset = freeSpace2ndTo1stEnd;
// Emit pass over the 1st vector.
8620 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8621 while(lastOffset < freeSpace1stTo2ndEnd)
8624 while(nextAlloc1stIndex < suballoc1stCount &&
8625 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8627 ++nextAlloc1stIndex;
8631 if(nextAlloc1stIndex < suballoc1stCount)
8633 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8636 if(lastOffset < suballoc.offset)
8639 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8640 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8645 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8648 lastOffset = suballoc.offset + suballoc.size;
8649 ++nextAlloc1stIndex;
8654 if(lastOffset < freeSpace1stTo2ndEnd)
8657 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8658 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8662 lastOffset = freeSpace1stTo2ndEnd;
// Emit pass over double-stack 2nd vector.
8666 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8668 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8669 while(lastOffset < size)
8672 while(nextAlloc2ndIndex != SIZE_MAX &&
8673 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8675 --nextAlloc2ndIndex;
8679 if(nextAlloc2ndIndex != SIZE_MAX)
8681 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8684 if(lastOffset < suballoc.offset)
8687 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8688 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8693 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8696 lastOffset = suballoc.offset + suballoc.size;
8697 --nextAlloc2ndIndex;
8702 if(lastOffset < size)
8705 const VkDeviceSize unusedRangeSize = size - lastOffset;
8706 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
// Close the JSON object.
8715 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of `allocSize`/`allocAlignment`
// bytes and fills *pAllocationRequest on success. Three placement
// strategies, by 2nd-vector mode:
// 1) upper address (double stack) — place just below the current top of the
//    downward-growing 2nd stack;
// 2) end of 1st vector — grow upward after the last 1st-vector item;
// 3) wrap-around (ring buffer) — place after the 2nd vector's end, possibly
//    making lost allocations from the 1st vector when canMakeOtherLost.
// Respects VMA_DEBUG_MARGIN gaps and the Vulkan bufferImageGranularity
// limit (linear vs. optimal resources must not share a granularity page).
// NOTE(review): returns, closing braces and the "upper address" trigger
// condition are elided in this extraction.
8717 #endif // #if VMA_STATS_STRING_ENABLED 8719 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8720 uint32_t currentFrameIndex,
8721 uint32_t frameInUseCount,
8722 VkDeviceSize bufferImageGranularity,
8723 VkDeviceSize allocSize,
8724 VkDeviceSize allocAlignment,
8726 VmaSuballocationType allocType,
8727 bool canMakeOtherLost,
8729 VmaAllocationRequest* pAllocationRequest)
8731 VMA_ASSERT(allocSize > 0);
8732 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8733 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8734 VMA_HEAVY_ASSERT(Validate());
8736 const VkDeviceSize size = GetSize();
8737 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8738 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: upper address (double stack). Incompatible with ring buffer.
8742 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8744 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Cannot fit at all if the request exceeds the whole block.
8749 if(allocSize > size)
// Start just below block end, or just below the current 2nd-stack top.
8753 VkDeviceSize resultBaseOffset = size - allocSize;
8754 if(!suballocations2nd.empty())
8756 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8757 resultBaseOffset = lastSuballoc.offset - allocSize;
// Would underflow below offset 0: fail this strategy.
8758 if(allocSize > lastSuballoc.offset)
8765 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the chosen spot (moving downward).
8768 if(VMA_DEBUG_MARGIN > 0)
8770 if(resultOffset < VMA_DEBUG_MARGIN)
8774 resultOffset -= VMA_DEBUG_MARGIN;
// Alignment is applied downward for the upper stack.
8778 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// bufferImageGranularity: conflict with the 2nd-stack neighbor above
// forces alignment down to a granularity boundary.
8782 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8784 bool bufferImageGranularityConflict =
false;
8785 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8787 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8788 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8790 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8792 bufferImageGranularityConflict =
true;
8800 if(bufferImageGranularityConflict)
8802 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// The candidate must not collide with the top of the 1st vector.
8807 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8808 suballocations1st.back().offset + suballocations1st.back().size :
8810 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against 1st-vector items below.
8814 if(bufferImageGranularity > 1)
8816 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8818 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8819 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8821 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: record the request (no lost allocations on this path).
8835 pAllocationRequest->offset = resultOffset;
8836 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8837 pAllocationRequest->sumItemSize = 0;
8839 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: append after the end of the 1st vector.
8845 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8849 VkDeviceSize resultBaseOffset = 0;
8850 if(!suballocations1st.empty())
8852 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8853 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8857 VkDeviceSize resultOffset = resultBaseOffset;
// Margin and alignment applied upward here.
8860 if(VMA_DEBUG_MARGIN > 0)
8862 resultOffset += VMA_DEBUG_MARGIN;
8866 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict with preceding 1st-vector items pushes the offset
// up to a granularity boundary.
8870 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8872 bool bufferImageGranularityConflict =
false;
8873 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8875 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8876 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8878 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8880 bufferImageGranularityConflict =
true;
8888 if(bufferImageGranularityConflict)
8890 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (double-stack) or at
// the end of the block.
8894 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8895 suballocations2nd.back().offset : size;
8898 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Also check granularity against the 2nd stack above the candidate.
8902 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8904 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8906 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8907 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8909 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: record the request.
8923 pAllocationRequest->offset = resultOffset;
8924 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8925 pAllocationRequest->sumItemSize = 0;
8927 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 3: wrap around, growing the ring buffer after the 2nd vector.
8934 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8936 VMA_ASSERT(!suballocations1st.empty());
8938 VkDeviceSize resultBaseOffset = 0;
8939 if(!suballocations2nd.empty())
8941 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8942 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8946 VkDeviceSize resultOffset = resultBaseOffset;
8949 if(VMA_DEBUG_MARGIN > 0)
8951 resultOffset += VMA_DEBUG_MARGIN;
8955 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict with preceding 2nd-vector items.
8959 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8961 bool bufferImageGranularityConflict =
false;
8962 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8964 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8965 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8967 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8969 bufferImageGranularityConflict =
true;
8977 if(bufferImageGranularityConflict)
8979 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8983 pAllocationRequest->itemsToMakeLostCount = 0;
8984 pAllocationRequest->sumItemSize = 0;
8985 size_t index1st = m_1stNullItemsBeginCount;
// Optionally sacrifice overlapping lost-capable allocations from the
// 1st vector to make room.
8987 if(canMakeOtherLost)
8989 while(index1st < suballocations1st.size() &&
8990 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8993 const VmaSuballocation& suballoc = suballocations1st[index1st];
8994 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
// Only allocations that can become lost and are stale enough
// (unused for more than frameInUseCount frames) may be sacrificed.
9000 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9001 if(suballoc.hAllocation->CanBecomeLost() &&
9002 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9004 ++pAllocationRequest->itemsToMakeLostCount;
9005 pAllocationRequest->sumItemSize += suballoc.size;
// Granularity: following items on the same page may also need to be lost.
9017 if(bufferImageGranularity > 1)
9019 while(index1st < suballocations1st.size())
9021 const VmaSuballocation& suballoc = suballocations1st[index1st];
9022 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9024 if(suballoc.hAllocation != VK_NULL_HANDLE)
9027 if(suballoc.hAllocation->CanBecomeLost() &&
9028 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9030 ++pAllocationRequest->itemsToMakeLostCount;
9031 pAllocationRequest->sumItemSize += suballoc.size;
// The request succeeds if it fits before the next surviving 1st-vector
// item (or before block end when the 1st vector is exhausted).
9050 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9051 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity check against remaining 1st-vector items above.
9055 if(bufferImageGranularity > 1)
9057 for(
size_t nextSuballocIndex = index1st;
9058 nextSuballocIndex < suballocations1st.size();
9059 nextSuballocIndex++)
9061 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9062 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9064 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: free size is the span up to the blocking item, minus the
// bytes reclaimed from items that will be made lost.
9078 pAllocationRequest->offset = resultOffset;
9079 pAllocationRequest->sumFreeSize =
9080 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9082 - pAllocationRequest->sumItemSize;
// Actually makes lost the allocations that CreateAllocationRequest marked as
// sacrificeable (itemsToMakeLostCount), scanning the 1st vector from its
// first live item. Each lost item becomes a free/null suballocation and is
// counted in m_1stNullItemsMiddleCount and m_SumFreeSize.
// NOTE(review): the trailing return/cleanup lines are elided in this
// extraction (original numbering jumps from 9120 to 9137).
9092 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9093 uint32_t currentFrameIndex,
9094 uint32_t frameInUseCount,
9095 VmaAllocationRequest* pAllocationRequest)
// Nothing to do if the request needed no sacrifices.
9097 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Lost-allocation requests are only produced for ring-buffer placement.
9102 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9104 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9105 size_t index1st = m_1stNullItemsBeginCount;
9106 size_t madeLostCount = 0;
9107 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9109 VMA_ASSERT(index1st < suballocations1st.size());
9110 VmaSuballocation& suballoc = suballocations1st[index1st];
9111 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9113 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9114 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
// MakeLost can fail if the allocation was used too recently.
9115 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9117 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9118 suballoc.hAllocation = VK_NULL_HANDLE;
9119 m_SumFreeSize += suballoc.size;
9120 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in this block that can become lost and has
// been unused for more than frameInUseCount frames. Scans both vectors,
// turning qualifying items into free/null suballocations and updating the
// corresponding null-item counters and m_SumFreeSize.
// Returns the number of allocations made lost.
9137 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9139 uint32_t lostAllocationCount = 0;
// Pass over the 1st vector (skipping the leading null run).
9141 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9142 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9144 VmaSuballocation& suballoc = suballocations1st[i];
9145 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9146 suballoc.hAllocation->CanBecomeLost() &&
9147 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9149 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9150 suballoc.hAllocation = VK_NULL_HANDLE;
9151 ++m_1stNullItemsMiddleCount;
9152 m_SumFreeSize += suballoc.size;
9153 ++lostAllocationCount;
// Pass over the 2nd vector.
9157 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9158 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9160 VmaSuballocation& suballoc = suballocations2nd[i];
9161 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9162 suballoc.hAllocation->CanBecomeLost() &&
9163 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9165 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9166 suballoc.hAllocation = VK_NULL_HANDLE;
9167 ++m_2ndNullItemsCount;
9168 ++lostAllocationCount;
// Presumably null items are compacted here when anything was lost —
// the call is elided in this extraction; verify against upstream VMA.
9172 if(lostAllocationCount)
9177 return lostAllocationCount;
// Scans both suballocation vectors and validates the magic-value guard
// bytes written immediately before (at offset - VMA_DEBUG_MARGIN) and
// immediately after each live allocation in the mapped block data.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard.
9180 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
// Check the 1st vector, skipping the leading null run.
9182 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9183 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9185 const VmaSuballocation& suballoc = suballocations1st[i];
9186 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9188 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9190 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9191 return VK_ERROR_VALIDATION_FAILED_EXT;
9193 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9195 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9196 return VK_ERROR_VALIDATION_FAILED_EXT;
// Check the 2nd vector the same way.
9201 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9202 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9204 const VmaSuballocation& suballoc = suballocations2nd[i];
9205 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9207 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9209 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9210 return VK_ERROR_VALIDATION_FAILED_EXT;
9212 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9214 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9215 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: inserts the new
// suballocation into the appropriate vector depending on where the request
// was placed (upper-address double stack, end of 1st vector, or wrap-around
// ring buffer) and updates m_SumFreeSize.
// NOTE(review): the branch condition selecting the upper-address path is
// elided in this extraction.
9223 void VmaBlockMetadata_Linear::Alloc(
9224 const VmaAllocationRequest& request,
9225 VmaSuballocationType type,
9226 VkDeviceSize allocSize,
9230 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address placement: push onto the 2nd vector and switch the block
// into double-stack mode.
9234 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
"CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9236 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9237 suballocations2nd.push_back(newSuballoc);
9238 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9242 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block goes into the 1st vector.
9245 if(suballocations1st.empty())
9247 suballocations1st.push_back(newSuballoc);
// Placed after the end of the 1st vector: simple append.
9252 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9255 VMA_ASSERT(request.offset + allocSize <= GetSize());
9256 suballocations1st.push_back(newSuballoc);
// Placed before the 1st vector's first live item: wrap-around into the
// 2nd vector, switching to (or continuing) ring-buffer mode.
9259 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9261 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9263 switch(m_2ndVectorMode)
9265 case SECOND_VECTOR_EMPTY:
9267 VMA_ASSERT(suballocations2nd.empty());
9268 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9270 case SECOND_VECTOR_RING_BUFFER:
9272 VMA_ASSERT(!suballocations2nd.empty());
9274 case SECOND_VECTOR_DOUBLE_STACK:
9275 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9281 suballocations2nd.push_back(newSuballoc);
// The request offset matched neither placement strategy.
9285 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
// Allocated bytes no longer count as free.
9290 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with the allocation's
// recorded offset.
9293 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9295 FreeAtOffset(allocation->GetOffset());
9298 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9300 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9301 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9303 if(!suballocations1st.empty())
9306 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9307 if(firstSuballoc.offset == offset)
9309 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9310 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9311 m_SumFreeSize += firstSuballoc.size;
9312 ++m_1stNullItemsBeginCount;
9319 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9320 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9322 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9323 if(lastSuballoc.offset == offset)
9325 m_SumFreeSize += lastSuballoc.size;
9326 suballocations2nd.pop_back();
9332 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9334 VmaSuballocation& lastSuballoc = suballocations1st.back();
9335 if(lastSuballoc.offset == offset)
9337 m_SumFreeSize += lastSuballoc.size;
9338 suballocations1st.pop_back();
9346 VmaSuballocation refSuballoc;
9347 refSuballoc.offset = offset;
9349 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9350 suballocations1st.begin() + m_1stNullItemsBeginCount,
9351 suballocations1st.end(),
9353 if(it != suballocations1st.end())
9355 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9356 it->hAllocation = VK_NULL_HANDLE;
9357 ++m_1stNullItemsMiddleCount;
9358 m_SumFreeSize += it->size;
9364 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9367 VmaSuballocation refSuballoc;
9368 refSuballoc.offset = offset;
9370 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9371 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9372 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9373 if(it != suballocations2nd.end())
9375 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9376 it->hAllocation = VK_NULL_HANDLE;
9377 ++m_2ndNullItemsCount;
9378 m_SumFreeSize += it->size;
9384 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9387 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9389 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9390 const size_t suballocCount = AccessSuballocations1st().size();
9391 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
9394 void VmaBlockMetadata_Linear::CleanupAfterFree()
9396 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9397 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9401 suballocations1st.clear();
9402 suballocations2nd.clear();
9403 m_1stNullItemsBeginCount = 0;
9404 m_1stNullItemsMiddleCount = 0;
9405 m_2ndNullItemsCount = 0;
9406 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9410 const size_t suballoc1stCount = suballocations1st.size();
9411 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9412 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
9415 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9416 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9418 ++m_1stNullItemsBeginCount;
9419 --m_1stNullItemsMiddleCount;
9423 while(m_1stNullItemsMiddleCount > 0 &&
9424 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9426 --m_1stNullItemsMiddleCount;
9427 suballocations1st.pop_back();
9431 while(m_2ndNullItemsCount > 0 &&
9432 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9434 --m_2ndNullItemsCount;
9435 suballocations2nd.pop_back();
9438 if(ShouldCompact1st())
9440 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9441 size_t srcIndex = m_1stNullItemsBeginCount;
9442 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9444 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9448 if(dstIndex != srcIndex)
9450 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9454 suballocations1st.resize(nonNullItemCount);
9455 m_1stNullItemsBeginCount = 0;
9456 m_1stNullItemsMiddleCount = 0;
9460 if(suballocations2nd.empty())
9462 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9466 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9468 suballocations1st.clear();
9469 m_1stNullItemsBeginCount = 0;
9471 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9474 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9475 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9476 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9477 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9479 ++m_1stNullItemsBeginCount;
9480 --m_1stNullItemsMiddleCount;
9482 m_2ndNullItemsCount = 0;
9483 m_1stVectorIndex ^= 1;
9488 VMA_HEAVY_ASSERT(Validate());
9495 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9496 VmaBlockMetadata(hAllocator),
9498 m_AllocationCount(0),
9502 memset(m_FreeList, 0,
sizeof(m_FreeList));
9505 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
9510 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9512 VmaBlockMetadata::Init(size);
9514 m_UsableSize = VmaPrevPow2(size);
9515 m_SumFreeSize = m_UsableSize;
9519 while(m_LevelCount < MAX_LEVELS &&
9520 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
9525 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9526 rootNode->offset = 0;
9527 rootNode->type = Node::TYPE_FREE;
9528 rootNode->parent = VMA_NULL;
9529 rootNode->buddy = VMA_NULL;
9532 AddToFreeListFront(0, rootNode);
9535 bool VmaBlockMetadata_Buddy::Validate()
const 9538 ValidationContext ctx;
9539 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9541 VMA_VALIDATE(
false &&
"ValidateNode failed.");
9543 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9544 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
9547 for(uint32_t level = 0; level < m_LevelCount; ++level)
9549 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9550 m_FreeList[level].front->free.prev == VMA_NULL);
9552 for(Node* node = m_FreeList[level].front;
9554 node = node->free.next)
9556 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9558 if(node->free.next == VMA_NULL)
9560 VMA_VALIDATE(m_FreeList[level].back == node);
9564 VMA_VALIDATE(node->free.next->free.prev == node);
9570 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9572 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
9578 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9580 for(uint32_t level = 0; level < m_LevelCount; ++level)
9582 if(m_FreeList[level].front != VMA_NULL)
9584 return LevelToNodeSize(level);
// Fills outInfo with allocation statistics by walking the whole buddy tree,
// then accounts the non-power-of-2 tail of the block as an unused range.
// NOTE(review): this extraction is lossy — the statements that initialize the
// fields of outInfo (original lines ~9593-9602) and the body of the trailing
// if that adds the unusable tail to the unused-range stats (~9606+) were
// dropped. Restore them from upstream vk_mem_alloc.h before compiling.
9590 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9592 const VkDeviceSize unusableSize = GetUnusableSize();
// Recursively accumulate stats for every node starting from the root.
9603 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
// Padding between VmaPrevPow2(size) and size is reported as unused space.
9605 if(unusableSize > 0)
9614 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9616 const VkDeviceSize unusableSize = GetUnusableSize();
9618 inoutStats.
size += GetSize();
9619 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9624 if(unusableSize > 0)
9631 #if VMA_STATS_STRING_ENABLED 9633 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9637 CalcAllocationStatInfo(stat);
9639 PrintDetailedMap_Begin(
9645 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9647 const VkDeviceSize unusableSize = GetUnusableSize();
9648 if(unusableSize > 0)
9650 PrintDetailedMap_UnusedRange(json,
9655 PrintDetailedMap_End(json);
9658 #endif // #if VMA_STATS_STRING_ENABLED 9660 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9661 uint32_t currentFrameIndex,
9662 uint32_t frameInUseCount,
9663 VkDeviceSize bufferImageGranularity,
9664 VkDeviceSize allocSize,
9665 VkDeviceSize allocAlignment,
9667 VmaSuballocationType allocType,
9668 bool canMakeOtherLost,
9670 VmaAllocationRequest* pAllocationRequest)
9672 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
9676 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9677 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9678 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9680 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9681 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9684 if(allocSize > m_UsableSize)
9689 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9690 for(uint32_t level = targetLevel + 1; level--; )
9692 for(Node* freeNode = m_FreeList[level].front;
9693 freeNode != VMA_NULL;
9694 freeNode = freeNode->free.next)
9696 if(freeNode->offset % allocAlignment == 0)
9698 pAllocationRequest->offset = freeNode->offset;
9699 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9700 pAllocationRequest->sumItemSize = 0;
9701 pAllocationRequest->itemsToMakeLostCount = 0;
9702 pAllocationRequest->customData = (
void*)(uintptr_t)level;
9711 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9712 uint32_t currentFrameIndex,
9713 uint32_t frameInUseCount,
9714 VmaAllocationRequest* pAllocationRequest)
9720 return pAllocationRequest->itemsToMakeLostCount == 0;
9723 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9732 void VmaBlockMetadata_Buddy::Alloc(
9733 const VmaAllocationRequest& request,
9734 VmaSuballocationType type,
9735 VkDeviceSize allocSize,
9739 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9740 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
9742 Node* currNode = m_FreeList[currLevel].front;
9743 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9744 while(currNode->offset != request.offset)
9746 currNode = currNode->free.next;
9747 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9751 while(currLevel < targetLevel)
9755 RemoveFromFreeList(currLevel, currNode);
9757 const uint32_t childrenLevel = currLevel + 1;
9760 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9761 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9763 leftChild->offset = currNode->offset;
9764 leftChild->type = Node::TYPE_FREE;
9765 leftChild->parent = currNode;
9766 leftChild->buddy = rightChild;
9768 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9769 rightChild->type = Node::TYPE_FREE;
9770 rightChild->parent = currNode;
9771 rightChild->buddy = leftChild;
9774 currNode->type = Node::TYPE_SPLIT;
9775 currNode->split.leftChild = leftChild;
9778 AddToFreeListFront(childrenLevel, rightChild);
9779 AddToFreeListFront(childrenLevel, leftChild);
9784 currNode = m_FreeList[currLevel].front;
9793 VMA_ASSERT(currLevel == targetLevel &&
9794 currNode != VMA_NULL &&
9795 currNode->type == Node::TYPE_FREE);
9796 RemoveFromFreeList(currLevel, currNode);
9799 currNode->type = Node::TYPE_ALLOCATION;
9800 currNode->allocation.alloc = hAllocation;
9802 ++m_AllocationCount;
9804 m_SumFreeSize -= allocSize;
9807 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9809 if(node->type == Node::TYPE_SPLIT)
9811 DeleteNode(node->split.leftChild->buddy);
9812 DeleteNode(node->split.leftChild);
9815 vma_delete(GetAllocationCallbacks(), node);
9818 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9820 VMA_VALIDATE(level < m_LevelCount);
9821 VMA_VALIDATE(curr->parent == parent);
9822 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9823 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9826 case Node::TYPE_FREE:
9828 ctx.calculatedSumFreeSize += levelNodeSize;
9829 ++ctx.calculatedFreeCount;
9831 case Node::TYPE_ALLOCATION:
9832 ++ctx.calculatedAllocationCount;
9833 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9834 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9836 case Node::TYPE_SPLIT:
9838 const uint32_t childrenLevel = level + 1;
9839 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9840 const Node*
const leftChild = curr->split.leftChild;
9841 VMA_VALIDATE(leftChild != VMA_NULL);
9842 VMA_VALIDATE(leftChild->offset == curr->offset);
9843 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9845 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9847 const Node*
const rightChild = leftChild->buddy;
9848 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9849 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9851 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
9862 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9866 VkDeviceSize currLevelNodeSize = m_UsableSize;
9867 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9868 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9871 currLevelNodeSize = nextLevelNodeSize;
9872 nextLevelNodeSize = currLevelNodeSize >> 1;
9877 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9880 Node* node = m_Root;
9881 VkDeviceSize nodeOffset = 0;
9883 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
9884 while(node->type == Node::TYPE_SPLIT)
9886 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9887 if(offset < nodeOffset + nextLevelSize)
9889 node = node->split.leftChild;
9893 node = node->split.leftChild->buddy;
9894 nodeOffset += nextLevelSize;
9897 levelNodeSize = nextLevelSize;
9900 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9901 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9904 --m_AllocationCount;
9905 m_SumFreeSize += alloc->GetSize();
9907 node->type = Node::TYPE_FREE;
9910 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9912 RemoveFromFreeList(level, node->buddy);
9913 Node*
const parent = node->parent;
9915 vma_delete(GetAllocationCallbacks(), node->buddy);
9916 vma_delete(GetAllocationCallbacks(), node);
9917 parent->type = Node::TYPE_FREE;
9925 AddToFreeListFront(level, node);
// Recursively accumulates per-node statistics into outInfo: free nodes count
// as unused ranges, allocation nodes count as allocations (plus an unused
// range for the internal fragmentation), split nodes recurse into children.
// NOTE(review): this extraction is lossy — the switch(node->type) scaffolding,
// break statements and the statements that actually update outInfo's counters
// (original lines ~9930-9955) were dropped. Restore from upstream before use.
9928 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9932 case Node::TYPE_FREE:
9938 case Node::TYPE_ALLOCATION:
9940 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation of this node is reported as an unused range.
9946 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9947 if(unusedRangeSize > 0)
9956 case Node::TYPE_SPLIT:
9958 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
// Right child is reachable only via the left child's buddy pointer.
9959 const Node*
const leftChild = node->split.leftChild;
9960 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9961 const Node*
const rightChild = leftChild->buddy;
9962 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
9970 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9972 VMA_ASSERT(node->type == Node::TYPE_FREE);
9975 Node*
const frontNode = m_FreeList[level].front;
9976 if(frontNode == VMA_NULL)
9978 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9979 node->free.prev = node->free.next = VMA_NULL;
9980 m_FreeList[level].front = m_FreeList[level].back = node;
9984 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9985 node->free.prev = VMA_NULL;
9986 node->free.next = frontNode;
9987 frontNode->free.prev = node;
9988 m_FreeList[level].front = node;
9992 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9994 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
9997 if(node->free.prev == VMA_NULL)
9999 VMA_ASSERT(m_FreeList[level].front == node);
10000 m_FreeList[level].front = node->free.next;
10004 Node*
const prevFreeNode = node->free.prev;
10005 VMA_ASSERT(prevFreeNode->free.next == node);
10006 prevFreeNode->free.next = node->free.next;
10010 if(node->free.next == VMA_NULL)
10012 VMA_ASSERT(m_FreeList[level].back == node);
10013 m_FreeList[level].back = node->free.prev;
10017 Node*
const nextFreeNode = node->free.next;
10018 VMA_ASSERT(nextFreeNode->free.prev == node);
10019 nextFreeNode->free.prev = node->free.prev;
10023 #if VMA_STATS_STRING_ENABLED 10024 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10028 case Node::TYPE_FREE:
10029 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10031 case Node::TYPE_ALLOCATION:
10033 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10034 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10035 if(allocSize < levelNodeSize)
10037 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10041 case Node::TYPE_SPLIT:
10043 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10044 const Node*
const leftChild = node->split.leftChild;
10045 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10046 const Node*
const rightChild = leftChild->buddy;
10047 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
10054 #endif // #if VMA_STATS_STRING_ENABLED 10060 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10061 m_pMetadata(VMA_NULL),
10062 m_MemoryTypeIndex(UINT32_MAX),
10064 m_hMemory(VK_NULL_HANDLE),
10066 m_pMappedData(VMA_NULL)
10070 void VmaDeviceMemoryBlock::Init(
10072 uint32_t newMemoryTypeIndex,
10073 VkDeviceMemory newMemory,
10074 VkDeviceSize newSize,
10076 uint32_t algorithm)
10078 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10080 m_MemoryTypeIndex = newMemoryTypeIndex;
10082 m_hMemory = newMemory;
10087 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10090 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
10096 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10098 m_pMetadata->Init(newSize);
10101 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10105 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10107 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10108 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10109 m_hMemory = VK_NULL_HANDLE;
10111 vma_delete(allocator, m_pMetadata);
10112 m_pMetadata = VMA_NULL;
10115 bool VmaDeviceMemoryBlock::Validate()
const 10117 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10118 (m_pMetadata->GetSize() != 0));
10120 return m_pMetadata->Validate();
10123 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
10125 void* pData =
nullptr;
10126 VkResult res = Map(hAllocator, 1, &pData);
10127 if(res != VK_SUCCESS)
10132 res = m_pMetadata->CheckCorruption(pData);
10134 Unmap(hAllocator, 1);
10139 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
10146 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10147 if(m_MapCount != 0)
10149 m_MapCount += count;
10150 VMA_ASSERT(m_pMappedData != VMA_NULL);
10151 if(ppData != VMA_NULL)
10153 *ppData = m_pMappedData;
10159 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10160 hAllocator->m_hDevice,
10166 if(result == VK_SUCCESS)
10168 if(ppData != VMA_NULL)
10170 *ppData = m_pMappedData;
10172 m_MapCount = count;
10178 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10185 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10186 if(m_MapCount >= count)
10188 m_MapCount -= count;
10189 if(m_MapCount == 0)
10191 m_pMappedData = VMA_NULL;
10192 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10197 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
10201 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10203 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10204 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10207 VkResult res = Map(hAllocator, 1, &pData);
10208 if(res != VK_SUCCESS)
10213 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10214 VmaWriteMagicValue(pData, allocOffset + allocSize);
10216 Unmap(hAllocator, 1);
10221 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10223 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10224 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10227 VkResult res = Map(hAllocator, 1, &pData);
10228 if(res != VK_SUCCESS)
10233 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10235 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10237 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10239 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10242 Unmap(hAllocator, 1);
10247 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10252 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10253 hAllocation->GetBlock() ==
this);
10255 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10256 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10257 hAllocator->m_hDevice,
10260 hAllocation->GetOffset());
10263 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10268 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10269 hAllocation->GetBlock() ==
this);
10271 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10272 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10273 hAllocator->m_hDevice,
10276 hAllocation->GetOffset());
10281 memset(&outInfo, 0,
sizeof(outInfo));
10300 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
10308 VmaPool_T::VmaPool_T(
10311 VkDeviceSize preferredBlockSize) :
10314 createInfo.memoryTypeIndex,
10315 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10316 createInfo.minBlockCount,
10317 createInfo.maxBlockCount,
10319 createInfo.frameInUseCount,
10321 createInfo.blockSize != 0,
10327 VmaPool_T::~VmaPool_T()
10331 #if VMA_STATS_STRING_ENABLED 10333 #endif // #if VMA_STATS_STRING_ENABLED 10335 VmaBlockVector::VmaBlockVector(
10337 uint32_t memoryTypeIndex,
10338 VkDeviceSize preferredBlockSize,
10339 size_t minBlockCount,
10340 size_t maxBlockCount,
10341 VkDeviceSize bufferImageGranularity,
10342 uint32_t frameInUseCount,
10344 bool explicitBlockSize,
10345 uint32_t algorithm) :
10346 m_hAllocator(hAllocator),
10347 m_MemoryTypeIndex(memoryTypeIndex),
10348 m_PreferredBlockSize(preferredBlockSize),
10349 m_MinBlockCount(minBlockCount),
10350 m_MaxBlockCount(maxBlockCount),
10351 m_BufferImageGranularity(bufferImageGranularity),
10352 m_FrameInUseCount(frameInUseCount),
10353 m_IsCustomPool(isCustomPool),
10354 m_ExplicitBlockSize(explicitBlockSize),
10355 m_Algorithm(algorithm),
10356 m_HasEmptyBlock(false),
10357 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10358 m_pDefragmentator(VMA_NULL),
10363 VmaBlockVector::~VmaBlockVector()
10365 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10367 for(
size_t i = m_Blocks.size(); i--; )
10369 m_Blocks[i]->Destroy(m_hAllocator);
10370 vma_delete(m_hAllocator, m_Blocks[i]);
10374 VkResult VmaBlockVector::CreateMinBlocks()
10376 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10378 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10379 if(res != VK_SUCCESS)
10387 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10389 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10391 const size_t blockCount = m_Blocks.size();
10400 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10402 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10403 VMA_ASSERT(pBlock);
10404 VMA_HEAVY_ASSERT(pBlock->Validate());
10405 pBlock->m_pMetadata->AddPoolStats(*pStats);
10409 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10411 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10412 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10413 (VMA_DEBUG_MARGIN > 0) &&
10414 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries in VmaBlockVector::Allocate when allocations race
// with other threads making each other lost.
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main allocation routine of a block vector. Strategy, in order:
// 1) try the last block, then every existing block (forward, then backward);
// 2) create a new block (halving the preferred size up to 3 times on failure
//    when the block size is not explicit) and allocate from it;
// 3) if allowed, retry up to VMA_ALLOCATION_TRY_COUNT times making other
//    (lost-capable) allocations lost to free up space.
// NOTE(review): this extraction is heavily lossy — the full parameter list,
// the argument lists of every AllocateFromBlock/CreateAllocationRequest call,
// several condition halves and all braces were dropped. The comments below
// annotate the surviving lines; restore the body from upstream before use.
10419 VkResult VmaBlockVector::Allocate(
10421 uint32_t currentFrameIndex,
10423 VkDeviceSize alignment,
10425 VmaSuballocationType suballocType,
// A new block may be created only while below the configured maximum.
10432 const bool canCreateNewBlock =
10434 (m_Blocks.size() < m_MaxBlockCount);
10441 canMakeOtherLost =
false;
// Upper-address allocations are only valid with the linear algorithm.
10445 if(isUpperAddress &&
10448 return VK_ERROR_FEATURE_NOT_PRESENT;
10462 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: the allocation (plus both debug margins) can never fit.
10466 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10468 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10471 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10478 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Search existing allocations. Try to allocate without making others lost.
// Fast path: the last block is the most likely to have free space.
10487 if(!m_Blocks.empty())
10489 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10490 VMA_ASSERT(pCurrBlock);
10491 VkResult res = AllocateFromBlock(
10502 if(res == VK_SUCCESS)
10504 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// Forward pass over all existing blocks.
10514 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10516 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10517 VMA_ASSERT(pCurrBlock);
10518 VkResult res = AllocateFromBlock(
10529 if(res == VK_SUCCESS)
10531 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Backward pass (used by the alternative search strategy).
10539 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10541 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10542 VMA_ASSERT(pCurrBlock);
10543 VkResult res = AllocateFromBlock(
10554 if(res == VK_SUCCESS)
10556 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Try to create a new block.
10564 if(canCreateNewBlock)
10567 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10568 uint32_t newBlockSizeShift = 0;
10569 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10571 if(!m_ExplicitBlockSize)
// Allocate 1/8, 1/4, 1/2 as first blocks to conserve memory.
10574 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10575 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10577 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10578 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10580 newBlockSize = smallerNewBlockSize;
10581 ++newBlockSizeShift;
10590 size_t newBlockIndex = 0;
10591 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// Allocation of this size failed? Try halving the block size.
10593 if(!m_ExplicitBlockSize)
10595 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10597 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10598 if(smallerNewBlockSize >= size)
10600 newBlockSize = smallerNewBlockSize;
10601 ++newBlockSizeShift;
10602 res = CreateBlock(newBlockSize, &newBlockIndex);
10611 if(res == VK_SUCCESS)
10613 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10614 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10616 res = AllocateFromBlock(
10627 if(res == VK_SUCCESS)
10629 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation from the freshly created block failed — give up.
10635 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Try to allocate from existing blocks with making other allocations lost.
10642 if(canMakeOtherLost)
10644 uint32_t tryIndex = 0;
10645 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10647 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10648 VmaAllocationRequest bestRequest = {};
10649 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// 1. Search existing allocations (forward pass).
10655 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10657 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10658 VMA_ASSERT(pCurrBlock);
10659 VmaAllocationRequest currRequest = {};
10660 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10663 m_BufferImageGranularity,
// Keep the cheapest request (fewest/smallest allocations to make lost).
10672 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10673 if(pBestRequestBlock == VMA_NULL ||
10674 currRequestCost < bestRequestCost)
10676 pBestRequestBlock = pCurrBlock;
10677 bestRequest = currRequest;
10678 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be made lost — cannot do better.
10680 if(bestRequestCost == 0)
// Backward pass (alternative search strategy).
10691 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10693 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10694 VMA_ASSERT(pCurrBlock);
10695 VmaAllocationRequest currRequest = {};
10696 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10699 m_BufferImageGranularity,
10708 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10709 if(pBestRequestBlock == VMA_NULL ||
10710 currRequestCost < bestRequestCost ||
10713 pBestRequestBlock = pCurrBlock;
10714 bestRequest = currRequest;
10715 bestRequestCost = currRequestCost;
10717 if(bestRequestCost == 0 ||
10727 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocation: map the block up front.
10731 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10732 if(res != VK_SUCCESS)
// Make the required allocations lost; on failure another thread raced us
// and the outer for-loop retries the whole search.
10738 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10744 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10746 m_HasEmptyBlock =
false;
// Commit the request and initialize the returned allocation object.
10749 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10750 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10751 (*pAllocation)->InitBlockAllocation(
10754 bestRequest.offset,
10760 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10761 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10762 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10763 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10765 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10767 if(IsCorruptionDetectionEnabled())
10769 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10770 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.")
// All retries exhausted — other threads kept stealing the space.
10785 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10787 return VK_ERROR_TOO_MANY_OBJECTS;
10791 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10794 void VmaBlockVector::Free(
10797 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
10801 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10803 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10805 if(IsCorruptionDetectionEnabled())
10807 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10808 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
10811 if(hAllocation->IsPersistentMap())
10813 pBlock->Unmap(m_hAllocator, 1);
10816 pBlock->m_pMetadata->Free(hAllocation);
10817 VMA_HEAVY_ASSERT(pBlock->Validate());
10819 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
10822 if(pBlock->m_pMetadata->IsEmpty())
10825 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10827 pBlockToDelete = pBlock;
10833 m_HasEmptyBlock =
true;
10838 else if(m_HasEmptyBlock)
10840 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10841 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10843 pBlockToDelete = pLastBlock;
10844 m_Blocks.pop_back();
10845 m_HasEmptyBlock =
false;
10849 IncrementallySortBlocks();
10854 if(pBlockToDelete != VMA_NULL)
10856 VMA_DEBUG_LOG(
" Deleted empty allocation");
10857 pBlockToDelete->Destroy(m_hAllocator);
10858 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the largest block size currently in m_Blocks. Iterates from the back
// and stops early once a block at least as large as m_PreferredBlockSize is seen.
10862 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10864 VkDeviceSize result = 0;
10865 for(
size_t i = m_Blocks.size(); i--; )
10867 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
// Early exit: nothing larger than the preferred size matters to the caller.
10868 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks by linear search; does not destroy the block.
10876 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10878 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10880 if(m_Blocks[blockIndex] == pBlock)
10882 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass over m_Blocks ordering by ascending sum of free space,
// so blocks with less free space are tried first by the allocation strategy.
// Amortized over many calls this keeps the vector approximately sorted.
10889 void VmaBlockVector::IncrementallySortBlocks()
10894 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10896 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10898 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to suballocate `size` bytes from a specific pBlock. On success creates
// a VmaAllocation_T, registers it in the block metadata, optionally maps the block
// (persistent-map case visible below), fills the debug pattern, and writes
// corruption-detection magic values. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when
// the block cannot satisfy the request.
// NOTE(review): several parameter and argument lines were lost in extraction;
// the mapping/canMakeOtherLost conditions are not fully visible here.
10905 VkResult VmaBlockVector::AllocateFromBlock(
10906 VmaDeviceMemoryBlock* pBlock,
10908 uint32_t currentFrameIndex,
10910 VkDeviceSize alignment,
10913 VmaSuballocationType suballocType,
10922 VmaAllocationRequest currRequest = {};
10923 if(pBlock->m_pMetadata->CreateAllocationRequest(
10926 m_BufferImageGranularity,
// This path never loses other allocations (canMakeOtherLost is false here).
10936 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
10940 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10941 if(res != VK_SUCCESS)
// The block is about to receive an allocation, so it can no longer be the
// cached empty block.
10948 if(pBlock->m_pMetadata->IsEmpty())
10950 m_HasEmptyBlock =
false;
10953 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10954 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10955 (*pAllocation)->InitBlockAllocation(
10958 currRequest.offset,
10964 VMA_HEAVY_ASSERT(pBlock->Validate());
10965 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10966 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10968 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10970 if(IsCorruptionDetectionEnabled())
10972 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10973 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10977 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// returns its index via pNewBlockIndex.
10980 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10982 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10983 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10984 allocInfo.allocationSize = blockSize;
10985 VkDeviceMemory mem = VK_NULL_HANDLE;
10986 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// NOTE(review): error-return path for res != VK_SUCCESS lost in extraction.
10995 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
// Block Init call partially lost; allocationSize is forwarded to it.
11000 allocInfo.allocationSize,
11004 m_Blocks.push_back(pBlock);
11005 if(pNewBlockIndex != VMA_NULL)
11007 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector's state as JSON: memory type, block size/count
// limits, frame-in-use count, algorithm, and a per-block detailed map keyed by
// block id. Takes the vector mutex for a consistent snapshot. The two branches
// visible below correspond to custom-pool vs. default-pool output formats.
11013 #if VMA_STATS_STRING_ENABLED 11015 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
11017 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11019 json.BeginObject();
11023 json.WriteString(
"MemoryTypeIndex");
11024 json.WriteNumber(m_MemoryTypeIndex);
11026 json.WriteString(
"BlockSize");
11027 json.WriteNumber(m_PreferredBlockSize);
11029 json.WriteString(
"BlockCount");
11030 json.BeginObject(
true);
11031 if(m_MinBlockCount > 0)
11033 json.WriteString(
"Min");
11034 json.WriteNumber((uint64_t)m_MinBlockCount);
11036 if(m_MaxBlockCount < SIZE_MAX)
11038 json.WriteString(
"Max");
11039 json.WriteNumber((uint64_t)m_MaxBlockCount);
11041 json.WriteString(
"Cur");
11042 json.WriteNumber((uint64_t)m_Blocks.size());
11045 if(m_FrameInUseCount > 0)
11047 json.WriteString(
"FrameInUseCount");
11048 json.WriteNumber(m_FrameInUseCount);
11051 if(m_Algorithm != 0)
11053 json.WriteString(
"Algorithm");
11054 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Else-branch (default pool): only the preferred block size is reported.
11059 json.WriteString(
"PreferredBlockSize");
11060 json.WriteNumber(m_PreferredBlockSize);
11063 json.WriteString(
"Blocks");
11064 json.BeginObject();
11065 for(
size_t i = 0; i < m_Blocks.size(); ++i)
11067 json.BeginString();
11068 json.ContinueString(m_Blocks[i]->GetId());
11071 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily constructs the per-block-vector VmaDefragmentator on first use and
// returns it; subsequent calls return the existing instance.
11078 #endif // #if VMA_STATS_STRING_ENABLED 11080 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
11082 uint32_t currentFrameIndex)
11084 if(m_pDefragmentator == VMA_NULL)
11086 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
11089 currentFrameIndex);
11092 return m_pDefragmentator;
// Runs the defragmentator under the vector mutex, accounts the moved bytes and
// allocation counts against the caller's budgets (passed by reference), then
// frees blocks that became empty (down to m_MinBlockCount) and records freed
// bytes/blocks into pDefragmentationStats.
11095 VkResult VmaBlockVector::Defragment(
11097 VkDeviceSize& maxBytesToMove,
11098 uint32_t& maxAllocationsToMove)
// Nothing to do if no defragmentator was ever created for this vector.
11100 if(m_pDefragmentator == VMA_NULL)
11105 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex)
;
11108 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
11111 if(pDefragmentationStats != VMA_NULL)
11113 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
11114 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
11115 pDefragmentationStats->
bytesMoved += bytesMoved;
// The defragmentator must never exceed the budgets it was given.
11117 VMA_ASSERT(bytesMoved <= maxBytesToMove);
11118 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
11119 maxBytesToMove -= bytesMoved;
11120 maxAllocationsToMove -= allocationsMoved;
// Recompute m_HasEmptyBlock while destroying now-empty blocks back to front
// (reverse iteration keeps indices valid across VmaVectorRemove).
11124 m_HasEmptyBlock =
false;
11125 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11127 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11128 if(pBlock->m_pMetadata->IsEmpty())
11130 if(m_Blocks.size() > m_MinBlockCount)
11132 if(pDefragmentationStats != VMA_NULL)
11135 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
11138 VmaVectorRemove(m_Blocks, blockIndex);
11139 pBlock->Destroy(m_hAllocator);
11140 vma_delete(m_hAllocator, pBlock);
// Empty block retained because we are at the minimum block count.
11144 m_HasEmptyBlock =
true;
// Deletes the lazily created defragmentator, if any, and resets the pointer.
11152 void VmaBlockVector::DestroyDefragmentator()
11154 if(m_pDefragmentator != VMA_NULL)
11156 vma_delete(m_hAllocator, m_pDefragmentator);
11157 m_pDefragmentator = VMA_NULL;
// Marks allocations in every block as lost, based on currentFrameIndex and
// m_FrameInUseCount, and returns the total count via pLostAllocationCount.
11161 void VmaBlockVector::MakePoolAllocationsLost(
11162 uint32_t currentFrameIndex,
11163 size_t* pLostAllocationCount)
11165 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11166 size_t lostAllocationCount = 0;
11167 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11169 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11170 VMA_ASSERT(pBlock);
11171 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
11173 if(pLostAllocationCount != VMA_NULL)
11175 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled for
// this vector; otherwise propagates the first failing block's result.
11179 VkResult VmaBlockVector::CheckCorruption()
11181 if(!IsCorruptionDetectionEnabled())
11183 return VK_ERROR_FEATURE_NOT_PRESENT;
11186 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11187 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11189 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11190 VMA_ASSERT(pBlock);
11191 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11192 if(res != VK_SUCCESS)
// Accumulates per-block statistics into pStats: totals, the entry for this
// vector's memory type, and the entry for the owning memory heap.
11200 void VmaBlockVector::AddStats(
VmaStats* pStats)
11202 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11203 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11205 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11207 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11209 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11210 VMA_ASSERT(pBlock);
11211 VMA_HEAVY_ASSERT(pBlock->Validate());
11213 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11214 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11215 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11216 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: captures the allocator, target block vector and frame index;
// containers use the allocator's allocation callbacks. Only the default
// (generic) algorithm supports defragmentation, hence the assert.
11223 VmaDefragmentator::VmaDefragmentator(
11225 VmaBlockVector* pBlockVector,
11226 uint32_t currentFrameIndex) :
11227 m_hAllocator(hAllocator),
11228 m_pBlockVector(pBlockVector),
11229 m_CurrentFrameIndex(currentFrameIndex),
11231 m_AllocationsMoved(0),
11232 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11233 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11235 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: frees the per-block BlockInfo objects owned by m_Blocks.
11238 VmaDefragmentator::~VmaDefragmentator()
11240 for(
size_t i = m_Blocks.size(); i--; )
11242 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a candidate for moving; pChanged (may be null)
// receives VK_TRUE later if the allocation actually gets relocated.
11246 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11248 AllocationInfo allocInfo;
11249 allocInfo.m_hAllocation = hAlloc;
11250 allocInfo.m_pChanged = pChanged;
11251 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, mapping it on demand. Reuses
// (in order) a mapping made earlier by defragmentation, then any pre-existing
// persistent mapping, and only maps fresh as a last resort.
11254 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11257 if(m_pMappedDataForDefragmentation)
11259 *ppMappedData = m_pMappedDataForDefragmentation;
11264 if(m_pBlock->GetMappedData())
11266 *ppMappedData = m_pBlock->GetMappedData();
11271 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11272 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases only the mapping that EnsureMapping created for defragmentation;
// a pre-existing persistent mapping is left untouched.
11276 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11278 if(m_pMappedDataForDefragmentation != VMA_NULL)
11280 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: walks source allocations from the back of the
// sorted block list and tries to re-place each into an earlier (or earlier-
// offset) block. Copies the bytes via mapped pointers, re-registers the
// allocation at the destination, and stops with VK_INCOMPLETE once either
// budget (maxBytesToMove / maxAllocationsToMove) would be exceeded.
// NOTE(review): extraction dropped several lines (loop heads, some index
// bookkeeping); comments cover only the visible statements.
11284 VkResult VmaDefragmentator::DefragmentRound(
11285 VkDeviceSize maxBytesToMove,
11286 uint32_t maxAllocationsToMove)
11288 if(m_Blocks.empty())
// Scan sources starting at the last (most-free) block, last allocation.
11293 size_t srcBlockIndex = m_Blocks.size() - 1;
11294 size_t srcAllocIndex = SIZE_MAX;
// Find the next valid (srcBlockIndex, srcAllocIndex) pair, walking backwards
// across blocks when the current block has no allocations left.
11300 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11302 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11305 if(srcBlockIndex == 0)
11312 srcAllocIndex = SIZE_MAX;
11317 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11321 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11322 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11324 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11325 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11326 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11327 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block up to and including the source block.
11330 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11332 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11333 VmaAllocationRequest dstAllocRequest;
11334 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11335 m_CurrentFrameIndex,
11336 m_pBlockVector->GetFrameInUseCount(),
11337 m_pBlockVector->GetBufferImageGranularity(),
11344 &dstAllocRequest) &&
// MoveMakesSense rejects moves that would not lower block index / offset.
11346 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11348 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
11351 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11352 (m_BytesMoved + size > maxBytesToMove))
11354 return VK_INCOMPLETE;
11357 void* pDstMappedData = VMA_NULL;
11358 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11359 if(res != VK_SUCCESS)
11364 void* pSrcMappedData = VMA_NULL;
11365 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11366 if(res != VK_SUCCESS)
// Byte copy of the allocation contents (memcpy call head lost in extraction).
11373 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11374 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11375 static_cast<size_t>(size));
// Re-stamp debug margins around the new location.
11377 if(VMA_DEBUG_MARGIN > 0)
11379 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11380 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: register at destination, free at source, repoint the allocation.
11383 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11388 allocInfo.m_hAllocation);
11389 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11391 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11393 if(allocInfo.m_pChanged != VMA_NULL)
11395 *allocInfo.m_pChanged = VK_TRUE;
11398 ++m_AllocationsMoved;
11399 m_BytesMoved += size;
11401 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous source candidate.
11409 if(srcAllocIndex > 0)
11415 if(srcBlockIndex > 0)
11418 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds BlockInfo wrappers for every block,
// distributes the registered allocations to their owning blocks (binary search
// over blocks sorted by pointer), sorts blocks into move-destination order,
// runs up to two DefragmentRound passes within the given budgets, and finally
// unmaps any mappings made during the process.
11428 VkResult VmaDefragmentator::Defragment(
11429 VkDeviceSize maxBytesToMove,
11430 uint32_t maxAllocationsToMove)
11432 if(m_Allocations.empty())
11438 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11439 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11441 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11442 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11443 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be matched by binary search below.
11447 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
11450 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11452 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped; they no longer occupy block space.
11454 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11456 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11457 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11458 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11460 (*it)->m_Allocations.push_back(allocInfo);
11468 m_Allocations.clear();
11470 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11472 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11473 pBlockInfo->CalcHasNonMovableAllocations();
11474 pBlockInfo->SortAllocationsBySizeDescecnding();
// Prefer destinations that already contain non-movable allocations.
11478 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
11481 VkResult result = VK_SUCCESS;
11482 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11484 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
11488 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11490 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic: a move is worthwhile only if it lands in an earlier block, or at
// a lower offset within the same block (compacting toward the front).
11496 bool VmaDefragmentator::MoveMakesSense(
11497 size_t dstBlockIndex, VkDeviceSize dstOffset,
11498 size_t srcBlockIndex, VkDeviceSize srcOffset)
11500 if(dstBlockIndex < srcBlockIndex)
11504 if(dstBlockIndex > srcBlockIndex)
11508 if(dstOffset < srcOffset)
// VmaRecorder constructor plus the start of Init() (the Init signature line was
// lost in extraction): captures recording flags, initializes the Windows QPC
// timer used for call timestamps, opens the output file with fopen_s, and
// writes the CSV header and format version ("1,4").
11518 #if VMA_RECORDING_ENABLED 11520 VmaRecorder::VmaRecorder() :
11525 m_StartCounter(INT64_MAX)
11531 m_UseMutex = useMutex;
11532 m_Flags = settings.
flags;
11534 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11535 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
11538 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11541 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header line and recording-format version.
11545 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11546 fprintf(m_File,
"%s\n",
"1,4");
// Destructor: closes the recording file if one was opened (fclose call lost
// in extraction).
11551 VmaRecorder::~VmaRecorder()
11553 if(m_File != VMA_NULL)
// Writes one CSV line recording the vmaCreateAllocator call.
11559 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11561 CallParams callParams;
11562 GetBasicParams(callParams);
11564 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11565 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Writes one CSV line recording the vmaDestroyAllocator call.
11569 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11571 CallParams callParams;
11572 GetBasicParams(callParams);
11574 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11575 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body of RecordCreatePool (its signature line was lost in extraction): writes
// one CSV line with the pool creation parameters; the argument list following
// the format string is also missing here.
11581 CallParams callParams;
11582 GetBasicParams(callParams);
11584 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11585 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording the vmaDestroyPool call with the pool handle.
11596 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11598 CallParams callParams;
11599 GetBasicParams(callParams);
11601 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11602 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaAllocateMemory: memory requirements,
// create-info fields, and the (possibly stringified) user data.
11607 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11608 const VkMemoryRequirements& vkMemReq,
11612 CallParams callParams;
11613 GetBasicParams(callParams);
11615 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11616 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11617 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11619 vkMemReq.alignment,
11620 vkMemReq.memoryTypeBits,
11628 userDataStr.GetString());
// Writes one CSV line recording vmaAllocateMemoryForBuffer, including the
// dedicated-allocation preference flags.
11632 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11633 const VkMemoryRequirements& vkMemReq,
11634 bool requiresDedicatedAllocation,
11635 bool prefersDedicatedAllocation,
11639 CallParams callParams;
11640 GetBasicParams(callParams);
11642 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11643 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11644 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11646 vkMemReq.alignment,
11647 vkMemReq.memoryTypeBits,
11648 requiresDedicatedAllocation ? 1 : 0,
11649 prefersDedicatedAllocation ? 1 : 0,
11657 userDataStr.GetString());
// Writes one CSV line recording vmaAllocateMemoryForImage, including the
// dedicated-allocation preference flags.
11661 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11662 const VkMemoryRequirements& vkMemReq,
11663 bool requiresDedicatedAllocation,
11664 bool prefersDedicatedAllocation,
11668 CallParams callParams;
11669 GetBasicParams(callParams);
11671 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11672 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11673 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11675 vkMemReq.alignment,
11676 vkMemReq.memoryTypeBits,
11677 requiresDedicatedAllocation ? 1 : 0,
11678 prefersDedicatedAllocation ? 1 : 0,
11686 userDataStr.GetString());
// Writes one CSV line recording vmaFreeMemory with the allocation handle.
11690 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11693 CallParams callParams;
11694 GetBasicParams(callParams);
11696 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11697 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaResizeAllocation (handle and new size).
11702 void VmaRecorder::RecordResizeAllocation(
11703 uint32_t frameIndex,
11705 VkDeviceSize newSize)
11707 CallParams callParams;
11708 GetBasicParams(callParams);
11710 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11711 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
11712 allocation, newSize);
// Writes one CSV line recording vmaSetAllocationUserData with the new user
// data rendered via UserDataString.
11716 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11718 const void* pUserData)
11720 CallParams callParams;
11721 GetBasicParams(callParams);
11723 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11724 UserDataString userDataStr(
11727 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11729 userDataStr.GetString());
// Writes one CSV line recording vmaCreateLostAllocation.
11733 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11736 CallParams callParams;
11737 GetBasicParams(callParams);
11739 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11740 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaMapMemory.
11745 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11748 CallParams callParams;
11749 GetBasicParams(callParams);
11751 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11752 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaUnmapMemory.
11757 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11760 CallParams callParams;
11761 GetBasicParams(callParams);
11763 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11764 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaFlushAllocation (handle, offset, size).
11769 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11770 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11772 CallParams callParams;
11773 GetBasicParams(callParams);
11775 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11776 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaInvalidateAllocation (handle, offset, size).
11783 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11784 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11786 CallParams callParams;
11787 GetBasicParams(callParams);
11789 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11790 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaCreateBuffer: buffer create-info fields,
// allocation create-info fields, and user data.
11797 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11798 const VkBufferCreateInfo& bufCreateInfo,
11802 CallParams callParams;
11803 GetBasicParams(callParams);
11805 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11806 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11807 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11808 bufCreateInfo.flags,
11809 bufCreateInfo.size,
11810 bufCreateInfo.usage,
11811 bufCreateInfo.sharingMode,
11812 allocCreateInfo.
flags,
11813 allocCreateInfo.
usage,
11817 allocCreateInfo.
pool,
11819 userDataStr.GetString());
// Writes one CSV line recording vmaCreateImage: the full image create-info,
// allocation create-info fields, and user data.
11823 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11824 const VkImageCreateInfo& imageCreateInfo,
11828 CallParams callParams;
11829 GetBasicParams(callParams);
11831 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11832 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11833 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11834 imageCreateInfo.flags,
11835 imageCreateInfo.imageType,
11836 imageCreateInfo.format,
11837 imageCreateInfo.extent.width,
11838 imageCreateInfo.extent.height,
11839 imageCreateInfo.extent.depth,
11840 imageCreateInfo.mipLevels,
11841 imageCreateInfo.arrayLayers,
11842 imageCreateInfo.samples,
11843 imageCreateInfo.tiling,
11844 imageCreateInfo.usage,
11845 imageCreateInfo.sharingMode,
11846 imageCreateInfo.initialLayout,
11847 allocCreateInfo.
flags,
11848 allocCreateInfo.
usage,
11852 allocCreateInfo.
pool,
11854 userDataStr.GetString());
// Writes one CSV line recording vmaDestroyBuffer.
11858 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11861 CallParams callParams;
11862 GetBasicParams(callParams);
11864 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11865 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaDestroyImage.
11870 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11873 CallParams callParams;
11874 GetBasicParams(callParams);
11876 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11877 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaTouchAllocation.
11882 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11885 CallParams callParams;
11886 GetBasicParams(callParams);
11888 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11889 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaGetAllocationInfo.
11894 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11897 CallParams callParams;
11898 GetBasicParams(callParams);
11900 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11901 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Writes one CSV line recording vmaMakePoolAllocationsLost.
11906 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11909 CallParams callParams;
11910 GetBasicParams(callParams);
11912 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11913 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of the UserDataString constructor (its signature line was lost in
// extraction): when user data is present it is either used directly as a
// string (string-flag branch, presumably — the condition line is missing) or
// rendered as a pointer value via sprintf_s.
11920 if(pUserData != VMA_NULL)
11924 m_Str = (
const char*)pUserData;
11928 sprintf_s(m_PtrStr,
"%p", pUserData);
// Dumps the recording configuration section ("Config,Begin" ... "Config,End"):
// physical-device identity and limits, every memory heap and type, whether
// VK_KHR_dedicated_allocation is enabled, and the values of the VMA_DEBUG_*
// compile-time macros — everything a replayer needs to reproduce the run.
11938 void VmaRecorder::WriteConfiguration(
11939 const VkPhysicalDeviceProperties& devProps,
11940 const VkPhysicalDeviceMemoryProperties& memProps,
11941 bool dedicatedAllocationExtensionEnabled)
11943 fprintf(m_File,
"Config,Begin\n");
11945 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11946 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11947 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11948 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11949 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11950 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11952 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11953 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11954 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
11956 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11957 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11959 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11960 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
11962 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11963 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11965 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11966 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11969 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
11971 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11972 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11973 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11974 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11975 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11976 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11977 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11978 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11979 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11981 fprintf(m_File,
"Config,End\n");
// Fills outParams with the calling thread id and elapsed seconds since
// recorder start, derived from QueryPerformanceCounter relative to
// m_StartCounter and scaled by m_Freq (Windows-only code path).
11984 void VmaRecorder::GetBasicParams(CallParams& outParams)
11986 outParams.threadId = GetCurrentThreadId();
11988 LARGE_INTEGER counter;
11989 QueryPerformanceCounter(&counter);
11990 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
11993 void VmaRecorder::Flush()
// VmaAllocator_T constructor (the signature line and the head of the
// initializer list were lost in extraction). Visible work: copy allocation
// callbacks, zero all member tables, initialize per-heap size limits, query
// physical-device and memory properties, validate debug-macro invariants,
// apply pHeapSizeLimit clamping, create one VmaBlockVector and one dedicated-
// allocation list per memory type, and set up the optional VmaRecorder.
12001 #endif // #if VMA_RECORDING_ENABLED 12009 m_hDevice(pCreateInfo->device),
12010 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
12011 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
12012 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
12013 m_PreferredLargeHeapBlockSize(0),
12014 m_PhysicalDevice(pCreateInfo->physicalDevice),
12015 m_CurrentFrameIndex(0),
12016 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
12019 ,m_pRecorder(VMA_NULL)
// Corruption detection needs the margin to hold whole uint32_t magic values.
12022 if(VMA_DEBUG_DETECT_CORRUPTION)
12025 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but extension support compiled out.
12030 #if !(VMA_DEDICATED_ALLOCATION) 12033 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
12037 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
12038 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
12039 memset(&m_MemProps, 0,
sizeof(m_MemProps));
12041 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
12042 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
12044 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
12046 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
12057 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
12058 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment/granularity values must be powers of two for the math elsewhere.
12060 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
12061 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
12062 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
12063 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-provided heap size limits, shrinking reported heap sizes.
12070 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
12072 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
12073 if(limit != VK_WHOLE_SIZE)
12075 m_HeapSizeLimit[heapIndex] = limit;
12076 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
12078 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + dedicated-allocation list per memory type.
12084 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12086 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
12088 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
12091 preferredBlockSize,
12094 GetBufferImageGranularity(),
12101 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
12108 VkResult res = VK_SUCCESS;
// Optional call recording: only available when compiled in.
12113 #if VMA_RECORDING_ENABLED 12114 m_pRecorder = vma_new(
this, VmaRecorder)();
12116 if(res != VK_SUCCESS)
12120 m_pRecorder->WriteConfiguration(
12121 m_PhysicalDeviceProperties,
12123 m_UseKhrDedicatedAllocation);
12124 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
12126 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
12127 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records and deletes the recorder (if enabled), asserts all
// custom pools were destroyed, then frees the per-memory-type dedicated-
// allocation lists and block vectors in reverse order.
12134 VmaAllocator_T::~VmaAllocator_T()
12136 #if VMA_RECORDING_ENABLED 12137 if(m_pRecorder != VMA_NULL)
12139 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
12140 vma_delete(
this, m_pRecorder);
12144 VMA_ASSERT(m_Pools.empty());
12146 for(
size_t i = GetMemoryTypeCount(); i--; )
12148 vma_delete(
this, m_pDedicatedAllocations[i]);
12149 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions in three stages: (1) statically-linked Vulkan entry
// points when VMA_STATIC_VULKAN_FUNCTIONS == 1, (2) user-supplied overrides
// from pVulkanFunctions (each copied only when non-null), (3) asserts that
// every required pointer ended up non-null. KHR dedicated-allocation entry
// points are fetched via vkGetDeviceProcAddr only when that feature is in use.
// NOTE(review): extraction-garbled text (fused line numbers, dropped braces).
12153 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Stage 1: take addresses of the statically linked core functions.
12155 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12156 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
12157 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
12158 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
12159 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
12160 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
12161 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
12162 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
12163 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
12164 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
12165 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
12166 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
12167 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
12168 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
12169 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
12170 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
12171 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR extension entry points are device-level: fetch via vkGetDeviceProcAddr.
12172 #if VMA_DEDICATED_ALLOCATION 12173 if(m_UseKhrDedicatedAllocation)
12175 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
12176 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
12177 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
12178 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Stage 2: user-supplied pointers override the defaults, field by field.
12180 #endif // #if VMA_DEDICATED_ALLOCATION 12181 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12183 #define VMA_COPY_IF_NOT_NULL(funcName) \ 12184 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 12186 if(pVulkanFunctions != VMA_NULL)
12188 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
12189 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
12190 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
12191 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
12192 VMA_COPY_IF_NOT_NULL(vkMapMemory);
12193 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
12194 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
12195 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
12196 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
12197 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
12198 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
12199 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
12200 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12201 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12202 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12203 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
// Stage 3: validate that every required function pointer is set.
12204 #if VMA_DEDICATED_ALLOCATION 12205 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
12206 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
12210 #undef VMA_COPY_IF_NOT_NULL 12214 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12215 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12216 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12217 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12218 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12219 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12220 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12221 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12222 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12223 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12224 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12225 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12226 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12227 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12228 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12229 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
12230 #if VMA_DEDICATED_ALLOCATION 12231 if(m_UseKhrDedicatedAllocation)
12233 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12234 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12239 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12241 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12242 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12243 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12244 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of a single, already-chosen memory type. Strategy: prefer a
// dedicated VkDeviceMemory when forced by debug settings, requested by the
// caller, or when the request exceeds half the preferred block size; otherwise
// sub-allocate from the type's block vector, falling back to dedicated memory
// if block allocation fails.
// NOTE(review): several condition/argument lines were dropped by extraction
// (e.g. between 12259 and 12265, and the argument lists of the Allocate /
// AllocateDedicatedMemory calls) -- verify against upstream vk_mem_alloc.h.
12247 VkResult VmaAllocator_T::AllocateMemoryOfType(
12249 VkDeviceSize alignment,
12250 bool dedicatedAllocation,
12251 VkBuffer dedicatedBuffer,
12252 VkImage dedicatedImage,
12254 uint32_t memTypeIndex,
12255 VmaSuballocationType suballocType,
12258 VMA_ASSERT(pAllocation != VMA_NULL);
12259 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
12265 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12270 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12271 VMA_ASSERT(blockVector);
12273 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: allocations larger than half a block get their own VkDeviceMemory.
12274 bool preferDedicatedMemory =
12275 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12276 dedicatedAllocation ||
12278 size > preferredBlockSize / 2;
12280 if(preferDedicatedMemory &&
12282 finalCreateInfo.
pool == VK_NULL_HANDLE)
12291 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12295 return AllocateDedicatedMemory(
// Primary path: sub-allocate from the block vector of this memory type.
12309 VkResult res = blockVector->Allocate(
12311 m_CurrentFrameIndex.load(),
12317 if(res == VK_SUCCESS)
12325 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try a dedicated allocation instead.
12329 res = AllocateDedicatedMemory(
12335 finalCreateInfo.pUserData,
12339 if(res == VK_SUCCESS)
12342 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12348 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates one dedicated VkDeviceMemory for a single resource: builds the
// VkMemoryAllocateInfo (chaining VkMemoryDedicatedAllocateInfoKHR when the KHR
// dedicated-allocation path is active), allocates, optionally maps
// persistently, creates the VmaAllocation_T bookkeeping object, and registers
// it in the sorted per-type dedicated-allocations vector under its mutex.
// NOTE(review): extraction dropped some parameter lines and the failure-path
// skeleton around 12392-12399 -- verify against upstream vk_mem_alloc.h.
12355 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12357 VmaSuballocationType suballocType,
12358 uint32_t memTypeIndex,
12360 bool isUserDataString,
12362 VkBuffer dedicatedBuffer,
12363 VkImage dedicatedImage,
12366 VMA_ASSERT(pAllocation);
12368 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12369 allocInfo.memoryTypeIndex = memTypeIndex;
12370 allocInfo.allocationSize = size;
// Chain the dedicated-allocation struct for exactly one of buffer/image.
12372 #if VMA_DEDICATED_ALLOCATION 12373 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12374 if(m_UseKhrDedicatedAllocation)
12376 if(dedicatedBuffer != VK_NULL_HANDLE)
12378 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12379 dedicatedAllocInfo.buffer = dedicatedBuffer;
12380 allocInfo.pNext = &dedicatedAllocInfo;
12382 else if(dedicatedImage != VK_NULL_HANDLE)
12384 dedicatedAllocInfo.image = dedicatedImage;
12385 allocInfo.pNext = &dedicatedAllocInfo;
12388 #endif // #if VMA_DEDICATED_ALLOCATION 12391 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12392 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12395 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistent mapping path; on map failure the fresh memory is released.
12399 void* pMappedData = VMA_NULL;
12402 res = (*m_VulkanFunctions.vkMapMemory)(
12411 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12412 FreeVulkanMemory(memTypeIndex, size, hMemory);
12417 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12418 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12419 (*pAllocation)->SetUserData(
this, pUserData);
12420 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12422 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted per-memory-type vector, guarded by its mutex.
12427 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12428 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12429 VMA_ASSERT(pDedicatedAllocations);
12430 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12433 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When KHR dedicated allocation is
// active, uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also learn whether a dedicated
// allocation is required/preferred; otherwise falls back to the core query and
// reports false for both flags.
12438 void VmaAllocator_T::GetBufferMemoryRequirements(
12440 VkMemoryRequirements& memReq,
12441 bool& requiresDedicatedAllocation,
12442 bool& prefersDedicatedAllocation)
const 12444 #if VMA_DEDICATED_ALLOCATION 12445 if(m_UseKhrDedicatedAllocation)
12447 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12448 memReqInfo.buffer = hBuffer;
12450 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12452 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12453 memReq2.pNext = &memDedicatedReq;
12455 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12457 memReq = memReq2.memoryRequirements;
12458 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12459 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query; no dedicated-allocation information.
12462 #endif // #if VMA_DEDICATED_ALLOCATION 12464 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12465 requiresDedicatedAllocation =
false;
12466 prefersDedicatedAllocation =
false;
// Image analogue of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR when KHR dedicated allocation is active,
// otherwise the core query with both dedicated flags reported false.
12470 void VmaAllocator_T::GetImageMemoryRequirements(
12472 VkMemoryRequirements& memReq,
12473 bool& requiresDedicatedAllocation,
12474 bool& prefersDedicatedAllocation)
const 12476 #if VMA_DEDICATED_ALLOCATION 12477 if(m_UseKhrDedicatedAllocation)
12479 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12480 memReqInfo.image = hImage;
12482 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12484 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12485 memReq2.pNext = &memDedicatedReq;
12487 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12489 memReq = memReq2.memoryRequirements;
12490 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12491 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query; no dedicated-allocation information.
12494 #endif // #if VMA_DEDICATED_ALLOCATION 12496 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12497 requiresDedicatedAllocation =
false;
12498 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates flag combinations, routes
// pool allocations to the pool's block vector, and otherwise iterates memory
// types (best first), masking out each failed type from memoryTypeBits and
// retrying until success or no candidate types remain.
// NOTE(review): many condition lines (the flag tests preceding each
// VMA_ASSERT(0 && ...), the FindMemoryTypeIndex calls, and argument lists)
// were dropped by extraction -- verify against upstream vk_mem_alloc.h.
12502 VkResult VmaAllocator_T::AllocateMemory(
12503 const VkMemoryRequirements& vkMemReq,
12504 bool requiresDedicatedAllocation,
12505 bool prefersDedicatedAllocation,
12506 VkBuffer dedicatedBuffer,
12507 VkImage dedicatedImage,
12509 VmaSuballocationType suballocType,
12512 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
12514 if(vkMemReq.size == 0)
12516 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject mutually exclusive / invalid createInfo flag combinations.
12521 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12522 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12527 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12528 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12530 if(requiresDedicatedAllocation)
12534 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12535 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12537 if(createInfo.
pool != VK_NULL_HANDLE)
12539 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12540 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12543 if((createInfo.
pool != VK_NULL_HANDLE) &&
12546 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12547 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: delegate directly to the pool's block vector.
12550 if(createInfo.
pool != VK_NULL_HANDLE)
12552 const VkDeviceSize alignmentForPool = VMA_MAX(
12553 vkMemReq.alignment,
12554 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12555 return createInfo.
pool->m_BlockVector.Allocate(
12557 m_CurrentFrameIndex.load(),
// General path: try the best memory type, then successively weaker ones.
12567 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12568 uint32_t memTypeIndex = UINT32_MAX;
12570 if(res == VK_SUCCESS)
12572 VkDeviceSize alignmentForMemType = VMA_MAX(
12573 vkMemReq.alignment,
12574 GetMemoryTypeMinAlignment(memTypeIndex));
12576 res = AllocateMemoryOfType(
12578 alignmentForMemType,
12579 requiresDedicatedAllocation || prefersDedicatedAllocation,
12587 if(res == VK_SUCCESS)
// Exclude the failed memory type and look for the next candidate.
12597 memoryTypeBits &= ~(1u << memTypeIndex);
12600 if(res == VK_SUCCESS)
12602 alignmentForMemType = VMA_MAX(
12603 vkMemReq.alignment,
12604 GetMemoryTypeMinAlignment(memTypeIndex));
12606 res = AllocateMemoryOfType(
12608 alignmentForMemType,
12609 requiresDedicatedAllocation || prefersDedicatedAllocation,
12617 if(res == VK_SUCCESS)
12627 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: if it is still alive (TouchAllocation succeeds),
// optionally overwrites its bytes with the "destroyed" debug pattern, then
// returns it to its owning block vector (custom pool or default per-type
// vector) or releases its dedicated VkDeviceMemory. Finally destroys the
// VmaAllocation_T bookkeeping object itself.
12638 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12640 VMA_ASSERT(allocation);
12642 if(TouchAllocation(allocation))
12644 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12646 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12649 switch(allocation->GetType())
12651 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12653 VmaBlockVector* pBlockVector = VMA_NULL;
12654 VmaPool hPool = allocation->GetPool();
12655 if(hPool != VK_NULL_HANDLE)
12657 pBlockVector = &hPool->m_BlockVector;
12661 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12662 pBlockVector = m_pBlockVectors[memTypeIndex];
12664 pBlockVector->Free(allocation);
12667 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12668 FreeDedicatedMemory(allocation);
// Destroy the bookkeeping object regardless of which path freed the memory.
12675 allocation->SetUserData(
this, VMA_NULL);
12676 vma_delete(
this, allocation);
// In-place resize of an existing allocation. Fails validation for size 0 or a
// lost allocation; succeeds trivially when the size is unchanged. Only block
// (sub-)allocations can be resized, and only when the block's metadata can
// accommodate the new size; dedicated allocations are not resizable.
12679 VkResult VmaAllocator_T::ResizeAllocation(
12681 VkDeviceSize newSize)
12683 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
12685 return VK_ERROR_VALIDATION_FAILED_EXT;
12687 if(newSize == alloc->GetSize())
12692 switch(alloc->GetType())
12694 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12695 return VK_ERROR_FEATURE_NOT_PRESENT;
12696 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12697 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
12699 alloc->ChangeSize(newSize);
12700 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
12705 return VK_ERROR_OUT_OF_POOL_MEMORY;
12709 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into *pStats: initializes the total and
// per-type/per-heap StatInfo entries, accumulates stats from the default block
// vectors, from every custom pool (under the pools mutex), and from every
// dedicated allocation (under each per-type mutex), then post-processes all
// StatInfo entries (averages etc.).
12713 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
12716 InitStatInfo(pStats->
total);
12717 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12719 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default (non-pool) block vectors, one per memory type.
12723 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12725 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12726 VMA_ASSERT(pBlockVector);
12727 pBlockVector->AddStats(pStats);
// Custom pools.
12732 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12733 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12735 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, counted into total, memory type, and memory heap.
12740 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12742 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12743 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12744 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12745 VMA_ASSERT(pDedicatedAllocVector);
12746 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12749 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12750 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12751 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12752 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and other post-processed fields.
12757 VmaPostprocessCalcStatInfo(pStats->
total);
12758 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12759 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12760 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12761 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002: AMD's PCI vendor ID as reported in VkPhysicalDeviceProperties::vendorID.
12764 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Legacy (synchronous) defragmentation. Phase 1: register each eligible
// allocation (block-type, HOST_VISIBLE|HOST_COHERENT memory, not lost, and --
// for pool allocations -- only pools using the default algorithm) with its
// block vector's defragmentator. Phase 2: run Defragment() on each
// host-visible default block vector and each pool, honoring the optional
// byte/move limits. Phase 3: destroy all defragmentators in reverse order.
// NOTE(review): some skeleton lines (hAlloc fetch at ~12791, limit reads at
// ~12838-12843, loop increments) were dropped by extraction -- verify against
// upstream vk_mem_alloc.h.
12766 VkResult VmaAllocator_T::Defragment(
12768 size_t allocationCount,
12769 VkBool32* pAllocationsChanged,
12773 if(pAllocationsChanged != VMA_NULL)
12775 memset(pAllocationsChanged, 0, allocationCount *
sizeof(VkBool32));
12777 if(pDefragmentationStats != VMA_NULL)
12779 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12782 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12784 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12786 const size_t poolCount = m_Pools.size();
// Phase 1: collect eligible allocations into per-block-vector defragmentators.
12789 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12792 VMA_ASSERT(hAlloc);
12793 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
12795 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12796 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12798 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12800 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12802 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12804 const VmaPool hAllocPool = hAlloc->GetPool();
12806 if(hAllocPool != VK_NULL_HANDLE)
// Only pools with the default allocation algorithm support defragmentation.
12809 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12811 pAllocBlockVector = &hAllocPool->m_BlockVector;
12817 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12820 if(pAllocBlockVector != VMA_NULL)
12822 VmaDefragmentator*
const pDefragmentator =
12823 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12824 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12825 &pAllocationsChanged[allocIndex] : VMA_NULL;
12826 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: execute defragmentation within the configured limits.
12831 VkResult result = VK_SUCCESS;
12835 VkDeviceSize maxBytesToMove = SIZE_MAX;
12836 uint32_t maxAllocationsToMove = UINT32_MAX;
12837 if(pDefragmentationInfo != VMA_NULL)
12844 for(uint32_t memTypeIndex = 0;
12845 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12849 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12851 result = m_pBlockVectors[memTypeIndex]->Defragment(
12852 pDefragmentationStats,
12854 maxAllocationsToMove);
12859 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12861 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12862 pDefragmentationStats,
12864 maxAllocationsToMove);
// Phase 3: tear down defragmentators in reverse creation order.
12870 for(
size_t poolIndex = poolCount; poolIndex--; )
12872 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12876 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12878 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12880 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// NOTE(review): the function signature line was dropped by extraction; from
// the body this appears to be VmaAllocator_T::GetAllocationInfo(VmaAllocation,
// VmaAllocationInfo*) -- TODO confirm against upstream vk_mem_alloc.h.
// For can-become-lost allocations it loops on a compare-exchange of the
// last-use frame index, filling *pAllocationInfo with either "lost" values
// (null memory, zero offset) or live values once the frame index is current.
// For ordinary allocations it fills the info directly.
12889 if(hAllocation->CanBecomeLost())
12895 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12896 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12899 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Allocation is lost: report size/userData but no usable memory/offset.
12903 pAllocationInfo->
offset = 0;
12904 pAllocationInfo->
size = hAllocation->GetSize();
12906 pAllocationInfo->
pUserData = hAllocation->GetUserData();
12909 else if(localLastUseFrameIndex == localCurrFrameIndex)
12911 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12912 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12913 pAllocationInfo->
offset = hAllocation->GetOffset();
12914 pAllocationInfo->
size = hAllocation->GetSize();
12916 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS retry loop: mark the allocation as used in the current frame.
12921 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12923 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: still bump the last-use frame when stats are enabled.
12930 #if VMA_STATS_STRING_ENABLED 12931 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12932 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12935 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12936 if(localLastUseFrameIndex == localCurrFrameIndex)
12942 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12944 localLastUseFrameIndex = localCurrFrameIndex;
12950 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12951 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12952 pAllocationInfo->
offset = hAllocation->GetOffset();
12953 pAllocationInfo->
size = hAllocation->GetSize();
12954 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12955 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns false when a
// can-become-lost allocation is already lost; otherwise bumps its last-use
// frame index with a compare-exchange retry loop and returns true.
12959 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12962 if(hAllocation->CanBecomeLost())
12964 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12965 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12968 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12972 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS retry loop: claim the current frame as the last-use frame.
12978 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12980 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: frame bookkeeping only when stats are compiled in.
12987 #if VMA_STATS_STRING_ENABLED 12988 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12989 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12992 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12993 if(localLastUseFrameIndex == localCurrFrameIndex)
12999 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
13001 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): the signature and initial validation lines were dropped by
// extraction; from the body this appears to be
// VmaAllocator_T::CreatePool(const VmaPoolCreateInfo*, VmaPool*) -- TODO
// confirm against upstream vk_mem_alloc.h. Creates a VmaPool_T with the
// preferred block size for its memory type, creates its minimum block count,
// assigns it a unique id, and inserts it into the sorted m_Pools vector.
13013 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
13023 return VK_ERROR_INITIALIZATION_FAILED;
13026 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
13028 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
13030 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
13031 if(res != VK_SUCCESS)
// Creation of the minimum blocks failed: destroy the half-built pool.
13033 vma_delete(
this, *pPool);
// Register the new pool under the pools mutex with a fresh id.
13040 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13041 (*pPool)->SetId(m_NextPoolId++);
13042 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
13048 void VmaAllocator_T::DestroyPool(
VmaPool pool)
13052 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13053 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
13054 VMA_ASSERT(success &&
"Pool not found in Allocator.");
13057 vma_delete(
this, pool);
// NOTE(review): fragment of what appears to be VmaAllocator_T::GetPoolStats --
// the signature line was dropped by extraction; delegates to the pool's block
// vector. TODO confirm against upstream vk_mem_alloc.h.
13062 pool->m_BlockVector.GetPoolStats(pPoolStats);
13065 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
13067 m_CurrentFrameIndex.store(frameIndex);
13070 void VmaAllocator_T::MakePoolAllocationsLost(
13072 size_t* pLostAllocationCount)
13074 hPool->m_BlockVector.MakePoolAllocationsLost(
13075 m_CurrentFrameIndex.load(),
13076 pLostAllocationCount);
13079 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
13081 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption-detection margins across all default block vectors whose
// memory type is selected by memoryTypeBits, then across all matching custom
// pools. Starts from VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS
// when at least one vector supports the check and passes.
// NOTE(review): the switch statements and their other case/default lines were
// dropped by extraction -- verify against upstream vk_mem_alloc.h.
13084 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
13086 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors, filtered by the requested memory-type mask.
13089 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13091 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
13093 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
13094 VMA_ASSERT(pBlockVector);
13095 VkResult localRes = pBlockVector->CheckCorruption();
13098 case VK_ERROR_FEATURE_NOT_PRESENT:
13101 finalRes = VK_SUCCESS;
// Custom pools whose memory type matches the mask.
13111 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13112 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
13114 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
13116 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
13119 case VK_ERROR_FEATURE_NOT_PRESENT:
13122 finalRes = VK_SUCCESS;
13134 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
13136 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
13137 (*pAllocation)->InitLost();
13140 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
13142 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
13145 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13147 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13148 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
13150 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13151 if(res == VK_SUCCESS)
13153 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
13158 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
13163 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13166 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
13168 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
13174 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
13176 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
13178 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
13181 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
13183 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
13184 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13186 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13187 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Can-become-lost allocations cannot be
// mapped. Block sub-allocations map their whole block (reference counted via
// pBlock->Map / BlockAllocMap) and offset the returned pointer; dedicated
// allocations delegate to their own map routine.
13191 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
13193 if(hAllocation->CanBecomeLost())
13195 return VK_ERROR_MEMORY_MAP_FAILED;
13198 switch(hAllocation->GetType())
13200 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13202 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13203 char *pBytes = VMA_NULL;
13204 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
13205 if(res == VK_SUCCESS)
// Returned pointer is block base plus this sub-allocation's offset.
13207 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
13208 hAllocation->BlockAllocMap();
13212 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13213 return hAllocation->DedicatedAllocMap(
this, ppData);
13216 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): the signature line was dropped by extraction; from the body
// this appears to be VmaAllocator_T::Unmap(VmaAllocation) -- TODO confirm
// against upstream vk_mem_alloc.h. Reverses Map(): decrements the block's
// map reference count for block sub-allocations, or unmaps a dedicated
// allocation directly.
13222 switch(hAllocation->GetType())
13224 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13226 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13227 hAllocation->BlockAllocUnmap();
13228 pBlock->Unmap(
this, 1);
13231 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13232 hAllocation->DedicatedAllocUnmap(
this);
// Binds a VkBuffer to the allocation's memory: dedicated allocations call
// vkBindBufferMemory directly; block sub-allocations bind through the block
// (which serializes binds on the shared VkDeviceMemory and applies the
// sub-allocation offset).
13239 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13241 VkResult res = VK_SUCCESS;
13242 switch(hAllocation->GetType())
13244 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13245 res = GetVulkanFunctions().vkBindBufferMemory(
13248 hAllocation->GetMemory(),
13251 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13253 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13254 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13255 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image analogue of BindBufferMemory: dedicated allocations call
// vkBindImageMemory directly; block sub-allocations bind through the block
// at the sub-allocation offset.
13264 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13266 VkResult res = VK_SUCCESS;
13267 switch(hAllocation->GetType())
13269 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13270 res = GetVulkanFunctions().vkBindImageMemory(
13273 hAllocation->GetMemory(),
13276 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13278 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13279 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13280 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a range of the allocation on non-coherent memory
// types. Builds a VkMappedMemoryRange whose offset is aligned down and size
// aligned up to nonCoherentAtomSize (as the Vulkan spec requires), clamped to
// the allocation (dedicated) or to the owning block's size (block
// sub-allocations, where the block-relative offset is added), then calls
// vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges.
13289 void VmaAllocator_T::FlushOrInvalidateAllocation(
13291 VkDeviceSize offset, VkDeviceSize size,
13292 VMA_CACHE_OPERATION op)
13294 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; size 0 is a no-op.
13295 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13297 const VkDeviceSize allocationSize = hAllocation->GetSize();
13298 VMA_ASSERT(offset <= allocationSize);
13300 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13302 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13303 memRange.memory = hAllocation->GetMemory();
13305 switch(hAllocation->GetType())
13307 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13308 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13309 if(size == VK_WHOLE_SIZE)
13311 memRange.size = allocationSize - memRange.offset;
13315 VMA_ASSERT(offset + size <= allocationSize);
13316 memRange.size = VMA_MIN(
13317 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13318 allocationSize - memRange.offset);
13322 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Offsets here are allocation-relative until the block offset is added below.
13325 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13326 if(size == VK_WHOLE_SIZE)
13328 size = allocationSize - offset;
13332 VMA_ASSERT(offset + size <= allocationSize);
13334 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13337 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13338 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13339 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13340 memRange.offset += allocationOffset;
13341 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13352 case VMA_CACHE_FLUSH:
13353 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13355 case VMA_CACHE_INVALIDATE:
13356 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: unregisters it from the sorted per-type
// dedicated-allocations vector (under that type's mutex), then releases its
// VkDeviceMemory through FreeVulkanMemory (which also restores heap budget).
// NOTE(review): lines 13379-13389 (unmapping of persistently mapped memory
// before the free, per upstream) were dropped by extraction -- verify.
13365 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13367 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13369 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13371 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13372 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13373 VMA_ASSERT(pDedicatedAllocations);
13374 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13375 VMA_ASSERT(success);
13378 VkDeviceMemory hMemory = allocation->GetMemory();
13390 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13392 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's bytes with a recognizable pattern
// (created/destroyed markers). Only runs when debug fills are enabled, the
// allocation cannot be lost, and its memory type is host-visible; maps, fills,
// flushes, and unmaps, asserting if the map fails.
13395 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13397 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13398 !hAllocation->CanBecomeLost() &&
13399 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13401 void* pData = VMA_NULL;
13402 VkResult res = Map(hAllocation, &pData);
13403 if(res == VK_SUCCESS)
13405 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
13406 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13407 Unmap(hAllocation);
13411 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Emits the allocator's detailed state as JSON (stats-string feature only):
// a "DedicatedAllocations" object keyed by memory type, a "DefaultPools"
// object for non-empty default block vectors, and a "Pools" object keyed by
// pool id. Section headers are emitted lazily on first non-empty entry.
// NOTE(review): extraction-garbled; some closing EndObject/EndString lines
// were dropped -- verify against upstream vk_mem_alloc.h.
13416 #if VMA_STATS_STRING_ENABLED 13418 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13420 bool dedicatedAllocationsStarted =
false;
13421 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13423 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13424 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13425 VMA_ASSERT(pDedicatedAllocVector);
13426 if(pDedicatedAllocVector->empty() ==
false)
13428 if(dedicatedAllocationsStarted ==
false)
13430 dedicatedAllocationsStarted =
true;
13431 json.WriteString(
"DedicatedAllocations");
13432 json.BeginObject();
13435 json.BeginString(
"Type ");
13436 json.ContinueString(memTypeIndex);
13441 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13443 json.BeginObject(
true);
13445 hAlloc->PrintParameters(json);
13452 if(dedicatedAllocationsStarted)
// Default block vectors, listed only when non-empty.
13458 bool allocationsStarted =
false;
13459 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13461 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13463 if(allocationsStarted ==
false)
13465 allocationsStarted =
true;
13466 json.WriteString(
"DefaultPools");
13467 json.BeginObject();
13470 json.BeginString(
"Type ");
13471 json.ContinueString(memTypeIndex);
13474 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13477 if(allocationsStarted)
// Custom pools, keyed by pool id, under the pools mutex.
13485 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13486 const size_t poolCount = m_Pools.size();
13489 json.WriteString(
"Pools");
13490 json.BeginObject();
13491 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13493 json.BeginString();
13494 json.ContinueString(m_Pools[poolIndex]->GetId());
13497 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
13504 #endif // #if VMA_STATS_STRING_ENABLED 13513 VMA_ASSERT(pCreateInfo && pAllocator);
13514 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13516 return (*pAllocator)->Init(pCreateInfo);
13522 if(allocator != VK_NULL_HANDLE)
13524 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13525 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13526 vma_delete(&allocationCallbacks, allocator);
13532 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13534 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13535 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13540 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13542 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13543 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13548 uint32_t memoryTypeIndex,
13549 VkMemoryPropertyFlags* pFlags)
13551 VMA_ASSERT(allocator && pFlags);
13552 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13553 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13558 uint32_t frameIndex)
13560 VMA_ASSERT(allocator);
13561 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13563 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13565 allocator->SetCurrentFrameIndex(frameIndex);
13572 VMA_ASSERT(allocator && pStats);
13573 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13574 allocator->CalculateStats(pStats);
13577 #if VMA_STATS_STRING_ENABLED 13581 char** ppStatsString,
13582 VkBool32 detailedMap)
13584 VMA_ASSERT(allocator && ppStatsString);
13585 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13587 VmaStringBuilder sb(allocator);
13589 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13590 json.BeginObject();
13593 allocator->CalculateStats(&stats);
13595 json.WriteString(
"Total");
13596 VmaPrintStatInfo(json, stats.
total);
13598 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13600 json.BeginString(
"Heap ");
13601 json.ContinueString(heapIndex);
13603 json.BeginObject();
13605 json.WriteString(
"Size");
13606 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13608 json.WriteString(
"Flags");
13609 json.BeginArray(
true);
13610 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13612 json.WriteString(
"DEVICE_LOCAL");
13618 json.WriteString(
"Stats");
13619 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
13622 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13624 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13626 json.BeginString(
"Type ");
13627 json.ContinueString(typeIndex);
13630 json.BeginObject();
13632 json.WriteString(
"Flags");
13633 json.BeginArray(
true);
13634 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13635 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13637 json.WriteString(
"DEVICE_LOCAL");
13639 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13641 json.WriteString(
"HOST_VISIBLE");
13643 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13645 json.WriteString(
"HOST_COHERENT");
13647 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13649 json.WriteString(
"HOST_CACHED");
13651 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13653 json.WriteString(
"LAZILY_ALLOCATED");
13659 json.WriteString(
"Stats");
13660 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13669 if(detailedMap == VK_TRUE)
13671 allocator->PrintDetailedMap(json);
13677 const size_t len = sb.GetLength();
13678 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13681 memcpy(pChars, sb.GetData(), len);
13683 pChars[len] =
'\0';
13684 *ppStatsString = pChars;
13689 char* pStatsString)
13691 if(pStatsString != VMA_NULL)
13693 VMA_ASSERT(allocator);
13694 size_t len = strlen(pStatsString);
13695 vma_delete_array(allocator, pStatsString, len + 1);
13699 #endif // #if VMA_STATS_STRING_ENABLED 13706 uint32_t memoryTypeBits,
13708 uint32_t* pMemoryTypeIndex)
13710 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13711 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13712 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13719 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13720 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13725 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13729 switch(pAllocationCreateInfo->
usage)
13734 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13736 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13740 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13743 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13744 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13746 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13750 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13751 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13757 *pMemoryTypeIndex = UINT32_MAX;
13758 uint32_t minCost = UINT32_MAX;
13759 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13760 memTypeIndex < allocator->GetMemoryTypeCount();
13761 ++memTypeIndex, memTypeBit <<= 1)
13764 if((memTypeBit & memoryTypeBits) != 0)
13766 const VkMemoryPropertyFlags currFlags =
13767 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13769 if((requiredFlags & ~currFlags) == 0)
13772 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13774 if(currCost < minCost)
13776 *pMemoryTypeIndex = memTypeIndex;
13781 minCost = currCost;
13786 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13791 const VkBufferCreateInfo* pBufferCreateInfo,
13793 uint32_t* pMemoryTypeIndex)
13795 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13796 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13797 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13798 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13800 const VkDevice hDev = allocator->m_hDevice;
13801 VkBuffer hBuffer = VK_NULL_HANDLE;
13802 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13803 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13804 if(res == VK_SUCCESS)
13806 VkMemoryRequirements memReq = {};
13807 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13808 hDev, hBuffer, &memReq);
13812 memReq.memoryTypeBits,
13813 pAllocationCreateInfo,
13816 allocator->GetVulkanFunctions().vkDestroyBuffer(
13817 hDev, hBuffer, allocator->GetAllocationCallbacks());
13824 const VkImageCreateInfo* pImageCreateInfo,
13826 uint32_t* pMemoryTypeIndex)
13828 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13829 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13830 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13831 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13833 const VkDevice hDev = allocator->m_hDevice;
13834 VkImage hImage = VK_NULL_HANDLE;
13835 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13836 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13837 if(res == VK_SUCCESS)
13839 VkMemoryRequirements memReq = {};
13840 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13841 hDev, hImage, &memReq);
13845 memReq.memoryTypeBits,
13846 pAllocationCreateInfo,
13849 allocator->GetVulkanFunctions().vkDestroyImage(
13850 hDev, hImage, allocator->GetAllocationCallbacks());
13860 VMA_ASSERT(allocator && pCreateInfo && pPool);
13862 VMA_DEBUG_LOG(
"vmaCreatePool");
13864 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13866 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13868 #if VMA_RECORDING_ENABLED 13869 if(allocator->GetRecorder() != VMA_NULL)
13871 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13882 VMA_ASSERT(allocator);
13884 if(pool == VK_NULL_HANDLE)
13889 VMA_DEBUG_LOG(
"vmaDestroyPool");
13891 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13893 #if VMA_RECORDING_ENABLED 13894 if(allocator->GetRecorder() != VMA_NULL)
13896 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13900 allocator->DestroyPool(pool);
13908 VMA_ASSERT(allocator && pool && pPoolStats);
13910 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13912 allocator->GetPoolStats(pool, pPoolStats);
13918 size_t* pLostAllocationCount)
13920 VMA_ASSERT(allocator && pool);
13922 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13924 #if VMA_RECORDING_ENABLED 13925 if(allocator->GetRecorder() != VMA_NULL)
13927 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13931 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13936 VMA_ASSERT(allocator && pool);
13938 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13940 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13942 return allocator->CheckPoolCorruption(pool);
13947 const VkMemoryRequirements* pVkMemoryRequirements,
13952 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13954 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13956 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13958 VkResult result = allocator->AllocateMemory(
13959 *pVkMemoryRequirements,
13965 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13968 #if VMA_RECORDING_ENABLED 13969 if(allocator->GetRecorder() != VMA_NULL)
13971 allocator->GetRecorder()->RecordAllocateMemory(
13972 allocator->GetCurrentFrameIndex(),
13973 *pVkMemoryRequirements,
13979 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13981 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13994 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13996 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13998 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14000 VkMemoryRequirements vkMemReq = {};
14001 bool requiresDedicatedAllocation =
false;
14002 bool prefersDedicatedAllocation =
false;
14003 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
14004 requiresDedicatedAllocation,
14005 prefersDedicatedAllocation);
14007 VkResult result = allocator->AllocateMemory(
14009 requiresDedicatedAllocation,
14010 prefersDedicatedAllocation,
14014 VMA_SUBALLOCATION_TYPE_BUFFER,
14017 #if VMA_RECORDING_ENABLED 14018 if(allocator->GetRecorder() != VMA_NULL)
14020 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
14021 allocator->GetCurrentFrameIndex(),
14023 requiresDedicatedAllocation,
14024 prefersDedicatedAllocation,
14030 if(pAllocationInfo && result == VK_SUCCESS)
14032 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14045 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
14047 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
14049 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14051 VkMemoryRequirements vkMemReq = {};
14052 bool requiresDedicatedAllocation =
false;
14053 bool prefersDedicatedAllocation =
false;
14054 allocator->GetImageMemoryRequirements(image, vkMemReq,
14055 requiresDedicatedAllocation, prefersDedicatedAllocation);
14057 VkResult result = allocator->AllocateMemory(
14059 requiresDedicatedAllocation,
14060 prefersDedicatedAllocation,
14064 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
14067 #if VMA_RECORDING_ENABLED 14068 if(allocator->GetRecorder() != VMA_NULL)
14070 allocator->GetRecorder()->RecordAllocateMemoryForImage(
14071 allocator->GetCurrentFrameIndex(),
14073 requiresDedicatedAllocation,
14074 prefersDedicatedAllocation,
14080 if(pAllocationInfo && result == VK_SUCCESS)
14082 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14092 VMA_ASSERT(allocator);
14094 if(allocation == VK_NULL_HANDLE)
14099 VMA_DEBUG_LOG(
"vmaFreeMemory");
14101 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14103 #if VMA_RECORDING_ENABLED 14104 if(allocator->GetRecorder() != VMA_NULL)
14106 allocator->GetRecorder()->RecordFreeMemory(
14107 allocator->GetCurrentFrameIndex(),
14112 allocator->FreeMemory(allocation);
14118 VkDeviceSize newSize)
14120 VMA_ASSERT(allocator && allocation);
14122 VMA_DEBUG_LOG(
"vmaResizeAllocation");
14124 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14126 #if VMA_RECORDING_ENABLED 14127 if(allocator->GetRecorder() != VMA_NULL)
14129 allocator->GetRecorder()->RecordResizeAllocation(
14130 allocator->GetCurrentFrameIndex(),
14136 return allocator->ResizeAllocation(allocation, newSize);
14144 VMA_ASSERT(allocator && allocation && pAllocationInfo);
14146 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14148 #if VMA_RECORDING_ENABLED 14149 if(allocator->GetRecorder() != VMA_NULL)
14151 allocator->GetRecorder()->RecordGetAllocationInfo(
14152 allocator->GetCurrentFrameIndex(),
14157 allocator->GetAllocationInfo(allocation, pAllocationInfo);
14164 VMA_ASSERT(allocator && allocation);
14166 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14168 #if VMA_RECORDING_ENABLED 14169 if(allocator->GetRecorder() != VMA_NULL)
14171 allocator->GetRecorder()->RecordTouchAllocation(
14172 allocator->GetCurrentFrameIndex(),
14177 return allocator->TouchAllocation(allocation);
14185 VMA_ASSERT(allocator && allocation);
14187 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14189 allocation->SetUserData(allocator, pUserData);
14191 #if VMA_RECORDING_ENABLED 14192 if(allocator->GetRecorder() != VMA_NULL)
14194 allocator->GetRecorder()->RecordSetAllocationUserData(
14195 allocator->GetCurrentFrameIndex(),
14206 VMA_ASSERT(allocator && pAllocation);
14208 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
14210 allocator->CreateLostAllocation(pAllocation);
14212 #if VMA_RECORDING_ENABLED 14213 if(allocator->GetRecorder() != VMA_NULL)
14215 allocator->GetRecorder()->RecordCreateLostAllocation(
14216 allocator->GetCurrentFrameIndex(),
14227 VMA_ASSERT(allocator && allocation && ppData);
14229 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14231 VkResult res = allocator->Map(allocation, ppData);
14233 #if VMA_RECORDING_ENABLED 14234 if(allocator->GetRecorder() != VMA_NULL)
14236 allocator->GetRecorder()->RecordMapMemory(
14237 allocator->GetCurrentFrameIndex(),
14249 VMA_ASSERT(allocator && allocation);
14251 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14253 #if VMA_RECORDING_ENABLED 14254 if(allocator->GetRecorder() != VMA_NULL)
14256 allocator->GetRecorder()->RecordUnmapMemory(
14257 allocator->GetCurrentFrameIndex(),
14262 allocator->Unmap(allocation);
14267 VMA_ASSERT(allocator && allocation);
14269 VMA_DEBUG_LOG(
"vmaFlushAllocation");
14271 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14273 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14275 #if VMA_RECORDING_ENABLED 14276 if(allocator->GetRecorder() != VMA_NULL)
14278 allocator->GetRecorder()->RecordFlushAllocation(
14279 allocator->GetCurrentFrameIndex(),
14280 allocation, offset, size);
14287 VMA_ASSERT(allocator && allocation);
14289 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14291 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14293 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14295 #if VMA_RECORDING_ENABLED 14296 if(allocator->GetRecorder() != VMA_NULL)
14298 allocator->GetRecorder()->RecordInvalidateAllocation(
14299 allocator->GetCurrentFrameIndex(),
14300 allocation, offset, size);
14307 VMA_ASSERT(allocator);
14309 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14311 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14313 return allocator->CheckCorruption(memoryTypeBits);
14319 size_t allocationCount,
14320 VkBool32* pAllocationsChanged,
14324 VMA_ASSERT(allocator && pAllocations);
14326 VMA_DEBUG_LOG(
"vmaDefragment");
14328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14330 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14338 VMA_ASSERT(allocator && allocation && buffer);
14340 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14342 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14344 return allocator->BindBufferMemory(allocation, buffer);
14352 VMA_ASSERT(allocator && allocation && image);
14354 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14356 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14358 return allocator->BindImageMemory(allocation, image);
14363 const VkBufferCreateInfo* pBufferCreateInfo,
14369 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14371 if(pBufferCreateInfo->size == 0)
14373 return VK_ERROR_VALIDATION_FAILED_EXT;
14376 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14378 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14380 *pBuffer = VK_NULL_HANDLE;
14381 *pAllocation = VK_NULL_HANDLE;
14384 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14385 allocator->m_hDevice,
14387 allocator->GetAllocationCallbacks(),
14392 VkMemoryRequirements vkMemReq = {};
14393 bool requiresDedicatedAllocation =
false;
14394 bool prefersDedicatedAllocation =
false;
14395 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14396 requiresDedicatedAllocation, prefersDedicatedAllocation);
14400 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14402 VMA_ASSERT(vkMemReq.alignment %
14403 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14405 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14407 VMA_ASSERT(vkMemReq.alignment %
14408 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14410 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14412 VMA_ASSERT(vkMemReq.alignment %
14413 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14417 res = allocator->AllocateMemory(
14419 requiresDedicatedAllocation,
14420 prefersDedicatedAllocation,
14423 *pAllocationCreateInfo,
14424 VMA_SUBALLOCATION_TYPE_BUFFER,
14427 #if VMA_RECORDING_ENABLED 14428 if(allocator->GetRecorder() != VMA_NULL)
14430 allocator->GetRecorder()->RecordCreateBuffer(
14431 allocator->GetCurrentFrameIndex(),
14432 *pBufferCreateInfo,
14433 *pAllocationCreateInfo,
14441 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14445 #if VMA_STATS_STRING_ENABLED 14446 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14448 if(pAllocationInfo != VMA_NULL)
14450 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14455 allocator->FreeMemory(*pAllocation);
14456 *pAllocation = VK_NULL_HANDLE;
14457 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14458 *pBuffer = VK_NULL_HANDLE;
14461 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14462 *pBuffer = VK_NULL_HANDLE;
14473 VMA_ASSERT(allocator);
14475 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14480 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14482 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14484 #if VMA_RECORDING_ENABLED 14485 if(allocator->GetRecorder() != VMA_NULL)
14487 allocator->GetRecorder()->RecordDestroyBuffer(
14488 allocator->GetCurrentFrameIndex(),
14493 if(buffer != VK_NULL_HANDLE)
14495 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14498 if(allocation != VK_NULL_HANDLE)
14500 allocator->FreeMemory(allocation);
14506 const VkImageCreateInfo* pImageCreateInfo,
14512 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14514 if(pImageCreateInfo->extent.width == 0 ||
14515 pImageCreateInfo->extent.height == 0 ||
14516 pImageCreateInfo->extent.depth == 0 ||
14517 pImageCreateInfo->mipLevels == 0 ||
14518 pImageCreateInfo->arrayLayers == 0)
14520 return VK_ERROR_VALIDATION_FAILED_EXT;
14523 VMA_DEBUG_LOG(
"vmaCreateImage");
14525 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14527 *pImage = VK_NULL_HANDLE;
14528 *pAllocation = VK_NULL_HANDLE;
14531 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14532 allocator->m_hDevice,
14534 allocator->GetAllocationCallbacks(),
14538 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14539 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14540 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14543 VkMemoryRequirements vkMemReq = {};
14544 bool requiresDedicatedAllocation =
false;
14545 bool prefersDedicatedAllocation =
false;
14546 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14547 requiresDedicatedAllocation, prefersDedicatedAllocation);
14549 res = allocator->AllocateMemory(
14551 requiresDedicatedAllocation,
14552 prefersDedicatedAllocation,
14555 *pAllocationCreateInfo,
14559 #if VMA_RECORDING_ENABLED 14560 if(allocator->GetRecorder() != VMA_NULL)
14562 allocator->GetRecorder()->RecordCreateImage(
14563 allocator->GetCurrentFrameIndex(),
14565 *pAllocationCreateInfo,
14573 res = allocator->BindImageMemory(*pAllocation, *pImage);
14577 #if VMA_STATS_STRING_ENABLED 14578 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14580 if(pAllocationInfo != VMA_NULL)
14582 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14587 allocator->FreeMemory(*pAllocation);
14588 *pAllocation = VK_NULL_HANDLE;
14589 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14590 *pImage = VK_NULL_HANDLE;
14593 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14594 *pImage = VK_NULL_HANDLE;
14605 VMA_ASSERT(allocator);
14607 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14612 VMA_DEBUG_LOG(
"vmaDestroyImage");
14614 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14616 #if VMA_RECORDING_ENABLED 14617 if(allocator->GetRecorder() != VMA_NULL)
14619 allocator->GetRecorder()->RecordDestroyImage(
14620 allocator->GetCurrentFrameIndex(),
14625 if(image != VK_NULL_HANDLE)
14627 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14629 if(allocation != VK_NULL_HANDLE)
14631 allocator->FreeMemory(allocation);
14635 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1586
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1887
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1643
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1617
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2209
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1598
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1844
Definition: vk_mem_alloc.h:1947
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1590
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2309
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1640
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2579
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2098
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1487
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2190
Definition: vk_mem_alloc.h:1924
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1579
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1997
Definition: vk_mem_alloc.h:1871
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1652
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2126
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1705
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1637
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1875
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1777
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1595
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1776
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2583
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1669
VmaStatInfo total
Definition: vk_mem_alloc.h:1786
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2591
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1981
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2574
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1596
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1521
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1646
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2140
Definition: vk_mem_alloc.h:2134
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1712
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2319
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1591
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1615
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2018
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2160
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2196
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1577
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2143
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1822
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2569
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2587
Definition: vk_mem_alloc.h:1861
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2005
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1594
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1782
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1527
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:1965
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1548
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1619
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1553
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2589
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1992
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2206
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1587
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1765
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2155
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1540
Definition: vk_mem_alloc.h:2130
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1931
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1778
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1544
Definition: vk_mem_alloc.h:1955
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2146
Definition: vk_mem_alloc.h:1870
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1593
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1987
Definition: vk_mem_alloc.h:1978
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1768
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1589
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2168
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1655
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2199
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1976
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2011
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1693
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1784
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1911
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1777
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1600
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1625
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1542
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1599
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2182
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1592
Definition: vk_mem_alloc.h:1942
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1633
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2333
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1649
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1777
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1774
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2187
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:1951
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2314
Definition: vk_mem_alloc.h:1962
Definition: vk_mem_alloc.h:1974
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2585
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1585
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1772
Definition: vk_mem_alloc.h:1827
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2136
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1622
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1770
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1597
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1601
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1898
Definition: vk_mem_alloc.h:1969
Definition: vk_mem_alloc.h:1854
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2328
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1575
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1588
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2115
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2295
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1959
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2080
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1778
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1937
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1609
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1785
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2193
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1778
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2300