23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1359 #include <vulkan/vulkan.h> 1361 #if !defined(VMA_DEDICATED_ALLOCATION) 1362 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1363 #define VMA_DEDICATED_ALLOCATION 1 1365 #define VMA_DEDICATED_ALLOCATION 0 1383 uint32_t memoryType,
1384 VkDeviceMemory memory,
1389 uint32_t memoryType,
1390 VkDeviceMemory memory,
1462 #if VMA_DEDICATED_ALLOCATION 1463 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1464 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1485 #ifndef VMA_RECORDING_ENABLED 1487 #define VMA_RECORDING_ENABLED 1 1489 #define VMA_RECORDING_ENABLED 0 1602 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1610 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1620 uint32_t memoryTypeIndex,
1621 VkMemoryPropertyFlags* pFlags);
1633 uint32_t frameIndex);
1666 #define VMA_STATS_STRING_ENABLED 1 1668 #if VMA_STATS_STRING_ENABLED 1675 char** ppStatsString,
1676 VkBool32 detailedMap);
1680 char* pStatsString);
1682 #endif // #if VMA_STATS_STRING_ENABLED 1911 uint32_t memoryTypeBits,
1913 uint32_t* pMemoryTypeIndex);
1929 const VkBufferCreateInfo* pBufferCreateInfo,
1931 uint32_t* pMemoryTypeIndex);
1947 const VkImageCreateInfo* pImageCreateInfo,
1949 uint32_t* pMemoryTypeIndex);
2111 size_t* pLostAllocationCount);
2210 const VkMemoryRequirements* pVkMemoryRequirements,
2520 size_t allocationCount,
2521 VkBool32* pAllocationsChanged,
2587 const VkBufferCreateInfo* pBufferCreateInfo,
2612 const VkImageCreateInfo* pImageCreateInfo,
2638 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2641 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2642 #define VMA_IMPLEMENTATION 2645 #ifdef VMA_IMPLEMENTATION 2646 #undef VMA_IMPLEMENTATION 2668 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2669 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2681 #if VMA_USE_STL_CONTAINERS 2682 #define VMA_USE_STL_VECTOR 1 2683 #define VMA_USE_STL_UNORDERED_MAP 1 2684 #define VMA_USE_STL_LIST 1 2687 #if VMA_USE_STL_VECTOR 2691 #if VMA_USE_STL_UNORDERED_MAP 2692 #include <unordered_map> 2695 #if VMA_USE_STL_LIST 2704 #include <algorithm> 2710 #define VMA_NULL nullptr 2713 #if defined(__APPLE__) || defined(__ANDROID__) 2715 void *aligned_alloc(
size_t alignment,
size_t size)
2718 if(alignment <
sizeof(
void*))
2720 alignment =
sizeof(
void*);
2724 if(posix_memalign(&pointer, alignment, size) == 0)
// Normal assert to check programmer errors; active only in Debug configuration
// by default. User may #define VMA_ASSERT before including this file.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert used on hot paths (e.g. inside containers); disabled even in Debug
// by default because it is called very often.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

// No-op unless the user provides a logging implementation.
#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif

// Helpers used when building the JSON statistics string.
#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED
2822 void Lock() { m_Mutex.lock(); }
2823 void Unlock() { m_Mutex.unlock(); }
2827 #define VMA_MUTEX VmaMutex 2838 #ifndef VMA_ATOMIC_UINT32 2839 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2842 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2847 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2850 #ifndef VMA_DEBUG_ALIGNMENT 2855 #define VMA_DEBUG_ALIGNMENT (1) 2858 #ifndef VMA_DEBUG_MARGIN 2863 #define VMA_DEBUG_MARGIN (0) 2866 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2871 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2874 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2880 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2883 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2888 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2891 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2896 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2899 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2900 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2904 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2905 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2909 #ifndef VMA_CLASS_NO_COPY 2910 #define VMA_CLASS_NO_COPY(className) \ 2912 className(const className&) = delete; \ 2913 className& operator=(const className&) = delete; 2916 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2919 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
2921 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
2922 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
2928 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2929 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (branch-free SWAR popcount).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns val up to the nearest multiple of align. Works for any align > 0;
// val + align must not overflow T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns val down to the nearest multiple of align (truncating division).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with mathematical rounding to the nearest integer.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if x is a power of two. Note: x == 0 also yields true,
// which callers rely on for "no alignment" cases.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns the smallest power of two greater than or equal to v.
// Precondition: v > 0 (v == 0 wraps around to 0 via the bit-smear trick).
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// 64-bit overload: smallest power of two >= v. Precondition: v > 0.
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
3000 static inline bool VmaStrIsEmpty(
const char* pStr)
3002 return pStr == VMA_NULL || *pStr ==
'\0';
// Maps a pool algorithm flag value to a human-readable name for the stats
// string. NOTE(review): the function body was lost in extraction — restore
// the switch over the algorithm flags from the upstream source.
3005 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3023 template<
typename Iterator,
typename Compare>
3024 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3026 Iterator centerValue = end; --centerValue;
3027 Iterator insertIndex = beg;
3028 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3030 if(cmp(*memTypeIndex, *centerValue))
3032 if(insertIndex != memTypeIndex)
3034 VMA_SWAP(*memTypeIndex, *insertIndex);
3039 if(insertIndex != centerValue)
3041 VMA_SWAP(*insertIndex, *centerValue);
3046 template<
typename Iterator,
typename Compare>
3047 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3051 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3052 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3053 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3057 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3059 #endif // #ifndef VMA_SORT 3068 static inline bool VmaBlocksOnSamePage(
3069 VkDeviceSize resourceAOffset,
3070 VkDeviceSize resourceASize,
3071 VkDeviceSize resourceBOffset,
3072 VkDeviceSize pageSize)
3074 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3075 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3076 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3077 VkDeviceSize resourceBStart = resourceBOffset;
3078 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3079 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation; used to decide whether two
// neighbors must be separated by bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,   // Image tiling unknown.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,    // VK_IMAGE_TILING_LINEAR.
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,   // VK_IMAGE_TILING_OPTIMAL.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3099 static inline bool VmaIsBufferImageGranularityConflict(
3100 VmaSuballocationType suballocType1,
3101 VmaSuballocationType suballocType2)
3103 if(suballocType1 > suballocType2)
3105 VMA_SWAP(suballocType1, suballocType2);
3108 switch(suballocType1)
3110 case VMA_SUBALLOCATION_TYPE_FREE:
3112 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3114 case VMA_SUBALLOCATION_TYPE_BUFFER:
3116 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3117 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3118 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3120 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3121 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3122 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3123 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3125 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3126 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3134 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3136 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3137 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3141 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3143 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3148 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3150 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3151 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3155 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3157 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3169 VMA_CLASS_NO_COPY(VmaMutexLock)
3171 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3172 m_pMutex(useMutex ? &mutex : VMA_NULL)
3189 VMA_MUTEX* m_pMutex;
3192 #if VMA_DEBUG_GLOBAL_MUTEX 3193 static VMA_MUTEX gDebugGlobalMutex;
3194 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3196 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3200 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over sorted range [beg, end): returns iterator to the first
element that is not less than key according to cmp, or end if none exists
(i.e. std::lower_bound semantics).
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3233 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3235 if((pAllocationCallbacks != VMA_NULL) &&
3236 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3238 return (*pAllocationCallbacks->pfnAllocation)(
3239 pAllocationCallbacks->pUserData,
3242 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3246 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3250 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3252 if((pAllocationCallbacks != VMA_NULL) &&
3253 (pAllocationCallbacks->pfnFree != VMA_NULL))
3255 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3259 VMA_SYSTEM_FREE(ptr);
3263 template<
typename T>
3264 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3266 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3269 template<
typename T>
3270 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3272 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3275 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3277 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3279 template<
typename T>
3280 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3283 VmaFree(pAllocationCallbacks, ptr);
3286 template<
typename T>
3287 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3291 for(
size_t i = count; i--; )
3295 VmaFree(pAllocationCallbacks, ptr);
3300 template<
typename T>
3301 class VmaStlAllocator
3304 const VkAllocationCallbacks*
const m_pCallbacks;
3305 typedef T value_type;
3307 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3308 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3310 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3311 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3313 template<
typename U>
3314 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3316 return m_pCallbacks == rhs.m_pCallbacks;
3318 template<
typename U>
3319 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3321 return m_pCallbacks != rhs.m_pCallbacks;
3324 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3327 #if VMA_USE_STL_VECTOR 3329 #define VmaVector std::vector 3331 template<
typename T,
typename allocatorT>
3332 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3334 vec.insert(vec.begin() + index, item);
3337 template<
typename T,
typename allocatorT>
3338 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3340 vec.erase(vec.begin() + index);
3343 #else // #if VMA_USE_STL_VECTOR 3348 template<
typename T,
typename AllocatorT>
3352 typedef T value_type;
3354 VmaVector(
const AllocatorT& allocator) :
3355 m_Allocator(allocator),
3362 VmaVector(
size_t count,
const AllocatorT& allocator) :
3363 m_Allocator(allocator),
3364 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3370 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3371 m_Allocator(src.m_Allocator),
3372 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3373 m_Count(src.m_Count),
3374 m_Capacity(src.m_Count)
3378 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3384 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3387 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3391 resize(rhs.m_Count);
3394 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3400 bool empty()
const {
return m_Count == 0; }
3401 size_t size()
const {
return m_Count; }
3402 T* data() {
return m_pArray; }
3403 const T* data()
const {
return m_pArray; }
3405 T& operator[](
size_t index)
3407 VMA_HEAVY_ASSERT(index < m_Count);
3408 return m_pArray[index];
3410 const T& operator[](
size_t index)
const 3412 VMA_HEAVY_ASSERT(index < m_Count);
3413 return m_pArray[index];
3418 VMA_HEAVY_ASSERT(m_Count > 0);
3421 const T& front()
const 3423 VMA_HEAVY_ASSERT(m_Count > 0);
3428 VMA_HEAVY_ASSERT(m_Count > 0);
3429 return m_pArray[m_Count - 1];
3431 const T& back()
const 3433 VMA_HEAVY_ASSERT(m_Count > 0);
3434 return m_pArray[m_Count - 1];
3437 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3439 newCapacity = VMA_MAX(newCapacity, m_Count);
3441 if((newCapacity < m_Capacity) && !freeMemory)
3443 newCapacity = m_Capacity;
3446 if(newCapacity != m_Capacity)
3448 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3451 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3453 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3454 m_Capacity = newCapacity;
3455 m_pArray = newArray;
3459 void resize(
size_t newCount,
bool freeMemory =
false)
3461 size_t newCapacity = m_Capacity;
3462 if(newCount > m_Capacity)
3464 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3468 newCapacity = newCount;
3471 if(newCapacity != m_Capacity)
3473 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3474 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3475 if(elementsToCopy != 0)
3477 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3479 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3480 m_Capacity = newCapacity;
3481 m_pArray = newArray;
3487 void clear(
bool freeMemory =
false)
3489 resize(0, freeMemory);
3492 void insert(
size_t index,
const T& src)
3494 VMA_HEAVY_ASSERT(index <= m_Count);
3495 const size_t oldCount = size();
3496 resize(oldCount + 1);
3497 if(index < oldCount)
3499 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3501 m_pArray[index] = src;
3504 void remove(
size_t index)
3506 VMA_HEAVY_ASSERT(index < m_Count);
3507 const size_t oldCount = size();
3508 if(index < oldCount - 1)
3510 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3512 resize(oldCount - 1);
3515 void push_back(
const T& src)
3517 const size_t newIndex = size();
3518 resize(newIndex + 1);
3519 m_pArray[newIndex] = src;
3524 VMA_HEAVY_ASSERT(m_Count > 0);
3528 void push_front(
const T& src)
3535 VMA_HEAVY_ASSERT(m_Count > 0);
3539 typedef T* iterator;
3541 iterator begin() {
return m_pArray; }
3542 iterator end() {
return m_pArray + m_Count; }
3545 AllocatorT m_Allocator;
3551 template<
typename T,
typename allocatorT>
3552 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3554 vec.insert(index, item);
3557 template<
typename T,
typename allocatorT>
3558 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3563 #endif // #if VMA_USE_STL_VECTOR 3565 template<
typename CmpLess,
typename VectorT>
3566 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3568 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3570 vector.data() + vector.size(),
3572 CmpLess()) - vector.data();
3573 VmaVectorInsert(vector, indexToInsert, value);
3574 return indexToInsert;
// Removes the first element equivalent to value from a CmpLess-sorted
// vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence under strict weak ordering: neither compares less.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
3595 template<
typename CmpLess,
typename IterT,
typename KeyT>
3596 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3599 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3600 beg, end, value, comparator);
3602 (!comparator(*it, value) && !comparator(value, *it)))
3617 template<
typename T>
3618 class VmaPoolAllocator
3620 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3622 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3623 ~VmaPoolAllocator();
3631 uint32_t NextFreeIndex;
3638 uint32_t FirstFreeIndex;
3641 const VkAllocationCallbacks* m_pAllocationCallbacks;
3642 size_t m_ItemsPerBlock;
3643 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3645 ItemBlock& CreateNewBlock();
3648 template<
typename T>
3649 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3650 m_pAllocationCallbacks(pAllocationCallbacks),
3651 m_ItemsPerBlock(itemsPerBlock),
3652 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3654 VMA_ASSERT(itemsPerBlock > 0);
3657 template<
typename T>
3658 VmaPoolAllocator<T>::~VmaPoolAllocator()
3663 template<
typename T>
3664 void VmaPoolAllocator<T>::Clear()
3666 for(
size_t i = m_ItemBlocks.size(); i--; )
3667 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3668 m_ItemBlocks.clear();
3671 template<
typename T>
3672 T* VmaPoolAllocator<T>::Alloc()
3674 for(
size_t i = m_ItemBlocks.size(); i--; )
3676 ItemBlock& block = m_ItemBlocks[i];
3678 if(block.FirstFreeIndex != UINT32_MAX)
3680 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3681 block.FirstFreeIndex = pItem->NextFreeIndex;
3682 return &pItem->Value;
3687 ItemBlock& newBlock = CreateNewBlock();
3688 Item*
const pItem = &newBlock.pItems[0];
3689 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3690 return &pItem->Value;
3693 template<
typename T>
3694 void VmaPoolAllocator<T>::Free(T* ptr)
3697 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3699 ItemBlock& block = m_ItemBlocks[i];
3703 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3706 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3708 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3709 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3710 block.FirstFreeIndex = index;
3714 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3717 template<
typename T>
3718 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3720 ItemBlock newBlock = {
3721 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3723 m_ItemBlocks.push_back(newBlock);
3726 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3727 newBlock.pItems[i].NextFreeIndex = i + 1;
3728 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3729 return m_ItemBlocks.back();
3735 #if VMA_USE_STL_LIST 3737 #define VmaList std::list 3739 #else // #if VMA_USE_STL_LIST 3741 template<
typename T>
3750 template<
typename T>
3753 VMA_CLASS_NO_COPY(VmaRawList)
3755 typedef VmaListItem<T> ItemType;
3757 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3761 size_t GetCount()
const {
return m_Count; }
3762 bool IsEmpty()
const {
return m_Count == 0; }
3764 ItemType* Front() {
return m_pFront; }
3765 const ItemType* Front()
const {
return m_pFront; }
3766 ItemType* Back() {
return m_pBack; }
3767 const ItemType* Back()
const {
return m_pBack; }
3769 ItemType* PushBack();
3770 ItemType* PushFront();
3771 ItemType* PushBack(
const T& value);
3772 ItemType* PushFront(
const T& value);
3777 ItemType* InsertBefore(ItemType* pItem);
3779 ItemType* InsertAfter(ItemType* pItem);
3781 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3782 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3784 void Remove(ItemType* pItem);
3787 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3788 VmaPoolAllocator<ItemType> m_ItemAllocator;
3794 template<
typename T>
3795 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3796 m_pAllocationCallbacks(pAllocationCallbacks),
3797 m_ItemAllocator(pAllocationCallbacks, 128),
3804 template<
typename T>
3805 VmaRawList<T>::~VmaRawList()
3811 template<
typename T>
3812 void VmaRawList<T>::Clear()
3814 if(IsEmpty() ==
false)
3816 ItemType* pItem = m_pBack;
3817 while(pItem != VMA_NULL)
3819 ItemType*
const pPrevItem = pItem->pPrev;
3820 m_ItemAllocator.Free(pItem);
3823 m_pFront = VMA_NULL;
3829 template<
typename T>
3830 VmaListItem<T>* VmaRawList<T>::PushBack()
3832 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3833 pNewItem->pNext = VMA_NULL;
3836 pNewItem->pPrev = VMA_NULL;
3837 m_pFront = pNewItem;
3843 pNewItem->pPrev = m_pBack;
3844 m_pBack->pNext = pNewItem;
3851 template<
typename T>
3852 VmaListItem<T>* VmaRawList<T>::PushFront()
3854 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3855 pNewItem->pPrev = VMA_NULL;
3858 pNewItem->pNext = VMA_NULL;
3859 m_pFront = pNewItem;
3865 pNewItem->pNext = m_pFront;
3866 m_pFront->pPrev = pNewItem;
3867 m_pFront = pNewItem;
3873 template<
typename T>
3874 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3876 ItemType*
const pNewItem = PushBack();
3877 pNewItem->Value = value;
3881 template<
typename T>
3882 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3884 ItemType*
const pNewItem = PushFront();
3885 pNewItem->Value = value;
3889 template<
typename T>
3890 void VmaRawList<T>::PopBack()
3892 VMA_HEAVY_ASSERT(m_Count > 0);
3893 ItemType*
const pBackItem = m_pBack;
3894 ItemType*
const pPrevItem = pBackItem->pPrev;
3895 if(pPrevItem != VMA_NULL)
3897 pPrevItem->pNext = VMA_NULL;
3899 m_pBack = pPrevItem;
3900 m_ItemAllocator.Free(pBackItem);
3904 template<
typename T>
3905 void VmaRawList<T>::PopFront()
3907 VMA_HEAVY_ASSERT(m_Count > 0);
3908 ItemType*
const pFrontItem = m_pFront;
3909 ItemType*
const pNextItem = pFrontItem->pNext;
3910 if(pNextItem != VMA_NULL)
3912 pNextItem->pPrev = VMA_NULL;
3914 m_pFront = pNextItem;
3915 m_ItemAllocator.Free(pFrontItem);
3919 template<
typename T>
3920 void VmaRawList<T>::Remove(ItemType* pItem)
3922 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3923 VMA_HEAVY_ASSERT(m_Count > 0);
3925 if(pItem->pPrev != VMA_NULL)
3927 pItem->pPrev->pNext = pItem->pNext;
3931 VMA_HEAVY_ASSERT(m_pFront == pItem);
3932 m_pFront = pItem->pNext;
3935 if(pItem->pNext != VMA_NULL)
3937 pItem->pNext->pPrev = pItem->pPrev;
3941 VMA_HEAVY_ASSERT(m_pBack == pItem);
3942 m_pBack = pItem->pPrev;
3945 m_ItemAllocator.Free(pItem);
3949 template<
typename T>
3950 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3952 if(pItem != VMA_NULL)
3954 ItemType*
const prevItem = pItem->pPrev;
3955 ItemType*
const newItem = m_ItemAllocator.Alloc();
3956 newItem->pPrev = prevItem;
3957 newItem->pNext = pItem;
3958 pItem->pPrev = newItem;
3959 if(prevItem != VMA_NULL)
3961 prevItem->pNext = newItem;
3965 VMA_HEAVY_ASSERT(m_pFront == pItem);
3975 template<
typename T>
3976 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3978 if(pItem != VMA_NULL)
3980 ItemType*
const nextItem = pItem->pNext;
3981 ItemType*
const newItem = m_ItemAllocator.Alloc();
3982 newItem->pNext = nextItem;
3983 newItem->pPrev = pItem;
3984 pItem->pNext = newItem;
3985 if(nextItem != VMA_NULL)
3987 nextItem->pPrev = newItem;
3991 VMA_HEAVY_ASSERT(m_pBack == pItem);
4001 template<
typename T>
4002 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4004 ItemType*
const newItem = InsertBefore(pItem);
4005 newItem->Value = value;
4009 template<
typename T>
4010 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4012 ItemType*
const newItem = InsertAfter(pItem);
4013 newItem->Value = value;
4017 template<
typename T,
typename AllocatorT>
4020 VMA_CLASS_NO_COPY(VmaList)
4031 T& operator*()
const 4033 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4034 return m_pItem->Value;
4036 T* operator->()
const 4038 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4039 return &m_pItem->Value;
4042 iterator& operator++()
4044 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4045 m_pItem = m_pItem->pNext;
4048 iterator& operator--()
4050 if(m_pItem != VMA_NULL)
4052 m_pItem = m_pItem->pPrev;
4056 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4057 m_pItem = m_pList->Back();
4062 iterator operator++(
int)
4064 iterator result = *
this;
4068 iterator operator--(
int)
4070 iterator result = *
this;
4075 bool operator==(
const iterator& rhs)
const 4077 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4078 return m_pItem == rhs.m_pItem;
4080 bool operator!=(
const iterator& rhs)
const 4082 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4083 return m_pItem != rhs.m_pItem;
4087 VmaRawList<T>* m_pList;
4088 VmaListItem<T>* m_pItem;
4090 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4096 friend class VmaList<T, AllocatorT>;
4099 class const_iterator
4108 const_iterator(
const iterator& src) :
4109 m_pList(src.m_pList),
4110 m_pItem(src.m_pItem)
4114 const T& operator*()
const 4116 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4117 return m_pItem->Value;
4119 const T* operator->()
const 4121 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4122 return &m_pItem->Value;
4125 const_iterator& operator++()
4127 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4128 m_pItem = m_pItem->pNext;
4131 const_iterator& operator--()
4133 if(m_pItem != VMA_NULL)
4135 m_pItem = m_pItem->pPrev;
4139 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4140 m_pItem = m_pList->Back();
4145 const_iterator operator++(
int)
4147 const_iterator result = *
this;
4151 const_iterator operator--(
int)
4153 const_iterator result = *
this;
4158 bool operator==(
const const_iterator& rhs)
const 4160 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4161 return m_pItem == rhs.m_pItem;
4163 bool operator!=(
const const_iterator& rhs)
const 4165 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4166 return m_pItem != rhs.m_pItem;
4170 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4176 const VmaRawList<T>* m_pList;
4177 const VmaListItem<T>* m_pItem;
4179 friend class VmaList<T, AllocatorT>;
4182 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4184 bool empty()
const {
return m_RawList.IsEmpty(); }
4185 size_t size()
const {
return m_RawList.GetCount(); }
4187 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4188 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4190 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4191 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4193 void clear() { m_RawList.Clear(); }
4194 void push_back(
const T& value) { m_RawList.PushBack(value); }
4195 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4196 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4199 VmaRawList<T> m_RawList;
4202 #endif // #if VMA_USE_STL_LIST 4210 #if VMA_USE_STL_UNORDERED_MAP 4212 #define VmaPair std::pair 4214 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4215 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4217 #else // #if VMA_USE_STL_UNORDERED_MAP 4219 template<
typename T1,
typename T2>
4225 VmaPair() : first(), second() { }
4226 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4232 template<
typename KeyT,
typename ValueT>
4236 typedef VmaPair<KeyT, ValueT> PairType;
4237 typedef PairType* iterator;
4239 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4241 iterator begin() {
return m_Vector.begin(); }
4242 iterator end() {
return m_Vector.end(); }
4244 void insert(
const PairType& pair);
4245 iterator find(
const KeyT& key);
4246 void erase(iterator it);
4249 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4252 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4254 template<
typename FirstT,
typename SecondT>
4255 struct VmaPairFirstLess
4257 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4259 return lhs.first < rhs.first;
4261 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4263 return lhs.first < rhsFirst;
4267 template<
typename KeyT,
typename ValueT>
4268 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4270 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4272 m_Vector.data() + m_Vector.size(),
4274 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4275 VmaVectorInsert(m_Vector, indexToInsert, pair);
4278 template<
typename KeyT,
typename ValueT>
4279 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4281 PairType* it = VmaBinaryFindFirstNotLess(
4283 m_Vector.data() + m_Vector.size(),
4285 VmaPairFirstLess<KeyT, ValueT>());
4286 if((it != m_Vector.end()) && (it->first == key))
4292 return m_Vector.end();
4296 template<
typename KeyT,
typename ValueT>
4297 void VmaMap<KeyT, ValueT>::erase(iterator it)
4299 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4302 #endif // #if VMA_USE_STL_UNORDERED_MAP 4308 class VmaDeviceMemoryBlock;
4310 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4312 struct VmaAllocation_T
4314 VMA_CLASS_NO_COPY(VmaAllocation_T)
4316 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4320 FLAG_USER_DATA_STRING = 0x01,
4324 enum ALLOCATION_TYPE
4326 ALLOCATION_TYPE_NONE,
4327 ALLOCATION_TYPE_BLOCK,
4328 ALLOCATION_TYPE_DEDICATED,
4331 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4334 m_pUserData(VMA_NULL),
4335 m_LastUseFrameIndex(currentFrameIndex),
4336 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4337 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4339 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4341 #if VMA_STATS_STRING_ENABLED 4342 m_CreationFrameIndex = currentFrameIndex;
4343 m_BufferImageUsage = 0;
4349 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4352 VMA_ASSERT(m_pUserData == VMA_NULL);
4355 void InitBlockAllocation(
4357 VmaDeviceMemoryBlock* block,
4358 VkDeviceSize offset,
4359 VkDeviceSize alignment,
4361 VmaSuballocationType suballocationType,
4365 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4366 VMA_ASSERT(block != VMA_NULL);
4367 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4368 m_Alignment = alignment;
4370 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4371 m_SuballocationType = (uint8_t)suballocationType;
4372 m_BlockAllocation.m_hPool = hPool;
4373 m_BlockAllocation.m_Block = block;
4374 m_BlockAllocation.m_Offset = offset;
4375 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4380 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4381 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4382 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4383 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4384 m_BlockAllocation.m_Block = VMA_NULL;
4385 m_BlockAllocation.m_Offset = 0;
4386 m_BlockAllocation.m_CanBecomeLost =
true;
4389 void ChangeBlockAllocation(
4391 VmaDeviceMemoryBlock* block,
4392 VkDeviceSize offset);
4395 void InitDedicatedAllocation(
4396 uint32_t memoryTypeIndex,
4397 VkDeviceMemory hMemory,
4398 VmaSuballocationType suballocationType,
4402 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4403 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4404 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4407 m_SuballocationType = (uint8_t)suballocationType;
4408 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4409 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4410 m_DedicatedAllocation.m_hMemory = hMemory;
4411 m_DedicatedAllocation.m_pMappedData = pMappedData;
4414 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4415 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4416 VkDeviceSize GetSize()
const {
return m_Size; }
4417 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4418 void* GetUserData()
const {
return m_pUserData; }
4419 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4420 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4422 VmaDeviceMemoryBlock* GetBlock()
const 4424 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4425 return m_BlockAllocation.m_Block;
4427 VkDeviceSize GetOffset()
const;
4428 VkDeviceMemory GetMemory()
const;
4429 uint32_t GetMemoryTypeIndex()
const;
4430 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4431 void* GetMappedData()
const;
4432 bool CanBecomeLost()
const;
4435 uint32_t GetLastUseFrameIndex()
const 4437 return m_LastUseFrameIndex.load();
4439 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4441 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4451 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4453 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4455 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4466 void BlockAllocMap();
4467 void BlockAllocUnmap();
4468 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4471 #if VMA_STATS_STRING_ENABLED 4472 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4473 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4475 void InitBufferImageUsage(uint32_t bufferImageUsage)
4477 VMA_ASSERT(m_BufferImageUsage == 0);
4478 m_BufferImageUsage = bufferImageUsage;
4481 void PrintParameters(
class VmaJsonWriter& json)
const;
4485 VkDeviceSize m_Alignment;
4486 VkDeviceSize m_Size;
4488 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4490 uint8_t m_SuballocationType;
4497 struct BlockAllocation
4500 VmaDeviceMemoryBlock* m_Block;
4501 VkDeviceSize m_Offset;
4502 bool m_CanBecomeLost;
4506 struct DedicatedAllocation
4508 uint32_t m_MemoryTypeIndex;
4509 VkDeviceMemory m_hMemory;
4510 void* m_pMappedData;
4516 BlockAllocation m_BlockAllocation;
4518 DedicatedAllocation m_DedicatedAllocation;
4521 #if VMA_STATS_STRING_ENABLED 4522 uint32_t m_CreationFrameIndex;
4523 uint32_t m_BufferImageUsage;
// One region inside a device memory block: an allocation or a free range,
// distinguished by `type`.
4533 struct VmaSuballocation
4535 VkDeviceSize offset;
// NOTE(review): orig lines 4536-4537 are missing from this extraction —
// presumably the size and allocation-handle members; confirm upstream.
4538 VmaSuballocationType type;
// Comparator: ascending order by offset.
4542 struct VmaSuballocationOffsetLess
4544 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4546 return lhs.offset < rhs.offset;
// Comparator: descending order by offset.
4549 struct VmaSuballocationOffsetGreater
4551 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4553 return lhs.offset > rhs.offset;
// Linked list of suballocations describing the layout of one whole block.
4557 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4560 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4575 struct VmaAllocationRequest
4577 VkDeviceSize offset;
4578 VkDeviceSize sumFreeSize;
4579 VkDeviceSize sumItemSize;
4580 VmaSuballocationList::iterator item;
4581 size_t itemsToMakeLostCount;
4584 VkDeviceSize CalcCost()
const 4586 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4594 class VmaBlockMetadata
4597 VmaBlockMetadata() : m_Size(0) { }
4598 virtual ~VmaBlockMetadata() { }
4599 virtual void Init(VkDeviceSize size) { m_Size = size; }
4602 virtual bool Validate()
const = 0;
4603 VkDeviceSize GetSize()
const {
return m_Size; }
4604 virtual size_t GetAllocationCount()
const = 0;
4605 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4606 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4608 virtual bool IsEmpty()
const = 0;
4610 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4612 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4614 #if VMA_STATS_STRING_ENABLED 4615 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4621 virtual bool CreateAllocationRequest(
4622 uint32_t currentFrameIndex,
4623 uint32_t frameInUseCount,
4624 VkDeviceSize bufferImageGranularity,
4625 VkDeviceSize allocSize,
4626 VkDeviceSize allocAlignment,
4628 VmaSuballocationType allocType,
4629 bool canMakeOtherLost,
4631 VmaAllocationRequest* pAllocationRequest) = 0;
4633 virtual bool MakeRequestedAllocationsLost(
4634 uint32_t currentFrameIndex,
4635 uint32_t frameInUseCount,
4636 VmaAllocationRequest* pAllocationRequest) = 0;
4638 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4640 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4644 const VmaAllocationRequest& request,
4645 VmaSuballocationType type,
4646 VkDeviceSize allocSize,
4652 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4655 #if VMA_STATS_STRING_ENABLED 4656 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4657 VkDeviceSize unusedBytes,
4658 size_t allocationCount,
4659 size_t unusedRangeCount)
const;
4660 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4661 VkDeviceSize offset,
4663 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4664 VkDeviceSize offset,
4665 VkDeviceSize size)
const;
4666 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4670 VkDeviceSize m_Size;
4673 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4675 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4678 virtual ~VmaBlockMetadata_Generic();
4679 virtual void Init(VkDeviceSize size);
4681 virtual bool Validate()
const;
4682 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4683 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4684 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4685 virtual bool IsEmpty()
const;
4687 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4688 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4690 #if VMA_STATS_STRING_ENABLED 4691 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4694 virtual bool CreateAllocationRequest(
4695 uint32_t currentFrameIndex,
4696 uint32_t frameInUseCount,
4697 VkDeviceSize bufferImageGranularity,
4698 VkDeviceSize allocSize,
4699 VkDeviceSize allocAlignment,
4701 VmaSuballocationType allocType,
4702 bool canMakeOtherLost,
4704 VmaAllocationRequest* pAllocationRequest);
4706 virtual bool MakeRequestedAllocationsLost(
4707 uint32_t currentFrameIndex,
4708 uint32_t frameInUseCount,
4709 VmaAllocationRequest* pAllocationRequest);
4711 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4713 virtual VkResult CheckCorruption(
const void* pBlockData);
4716 const VmaAllocationRequest& request,
4717 VmaSuballocationType type,
4718 VkDeviceSize allocSize,
4723 virtual void FreeAtOffset(VkDeviceSize offset);
4726 uint32_t m_FreeCount;
4727 VkDeviceSize m_SumFreeSize;
4728 VmaSuballocationList m_Suballocations;
4731 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4733 bool ValidateFreeSuballocationList()
const;
4737 bool CheckAllocation(
4738 uint32_t currentFrameIndex,
4739 uint32_t frameInUseCount,
4740 VkDeviceSize bufferImageGranularity,
4741 VkDeviceSize allocSize,
4742 VkDeviceSize allocAlignment,
4743 VmaSuballocationType allocType,
4744 VmaSuballocationList::const_iterator suballocItem,
4745 bool canMakeOtherLost,
4746 VkDeviceSize* pOffset,
4747 size_t* itemsToMakeLostCount,
4748 VkDeviceSize* pSumFreeSize,
4749 VkDeviceSize* pSumItemSize)
const;
4751 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4755 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4758 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4761 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4842 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4844 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4847 virtual ~VmaBlockMetadata_Linear();
4848 virtual void Init(VkDeviceSize size);
4850 virtual bool Validate()
const;
4851 virtual size_t GetAllocationCount()
const;
4852 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4853 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4854 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4856 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4857 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4859 #if VMA_STATS_STRING_ENABLED 4860 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4863 virtual bool CreateAllocationRequest(
4864 uint32_t currentFrameIndex,
4865 uint32_t frameInUseCount,
4866 VkDeviceSize bufferImageGranularity,
4867 VkDeviceSize allocSize,
4868 VkDeviceSize allocAlignment,
4870 VmaSuballocationType allocType,
4871 bool canMakeOtherLost,
4873 VmaAllocationRequest* pAllocationRequest);
4875 virtual bool MakeRequestedAllocationsLost(
4876 uint32_t currentFrameIndex,
4877 uint32_t frameInUseCount,
4878 VmaAllocationRequest* pAllocationRequest);
4880 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4882 virtual VkResult CheckCorruption(
const void* pBlockData);
4885 const VmaAllocationRequest& request,
4886 VmaSuballocationType type,
4887 VkDeviceSize allocSize,
4892 virtual void FreeAtOffset(VkDeviceSize offset);
4902 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
4904 enum SECOND_VECTOR_MODE
4906 SECOND_VECTOR_EMPTY,
4911 SECOND_VECTOR_RING_BUFFER,
4917 SECOND_VECTOR_DOUBLE_STACK,
4920 VkDeviceSize m_SumFreeSize;
4921 SuballocationVectorType m_Suballocations0, m_Suballocations1;
4922 uint32_t m_1stVectorIndex;
4923 SECOND_VECTOR_MODE m_2ndVectorMode;
4925 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4926 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
4927 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4928 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
4931 size_t m_1stNullItemsBeginCount;
4933 size_t m_1stNullItemsMiddleCount;
4935 size_t m_2ndNullItemsCount;
4937 bool ShouldCompact1st()
const;
4938 void CleanupAfterFree();
4944 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
4946 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
4949 virtual ~VmaBlockMetadata_Buddy();
4950 virtual void Init(VkDeviceSize size);
4952 virtual bool Validate()
const;
4953 virtual size_t GetAllocationCount()
const;
4954 virtual VkDeviceSize GetSumFreeSize()
const;
4955 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4956 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
4958 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4959 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4961 #if VMA_STATS_STRING_ENABLED 4962 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4965 virtual bool CreateAllocationRequest(
4966 uint32_t currentFrameIndex,
4967 uint32_t frameInUseCount,
4968 VkDeviceSize bufferImageGranularity,
4969 VkDeviceSize allocSize,
4970 VkDeviceSize allocAlignment,
4972 VmaSuballocationType allocType,
4973 bool canMakeOtherLost,
4975 VmaAllocationRequest* pAllocationRequest);
4977 virtual bool MakeRequestedAllocationsLost(
4978 uint32_t currentFrameIndex,
4979 uint32_t frameInUseCount,
4980 VmaAllocationRequest* pAllocationRequest);
4982 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4984 virtual VkResult CheckCorruption(
const void* pBlockData);
4987 const VmaAllocationRequest& request,
4988 VmaSuballocationType type,
4989 VkDeviceSize allocSize,
4993 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
4994 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
4997 static const size_t MAX_LEVELS = 30;
5001 VkDeviceSize offset;
5034 } m_FreeList[MAX_LEVELS];
5036 void DeleteNode(Node* node);
5037 bool ValidateNode(
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5038 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5039 VkDeviceSize LevelToNodeSize(uint32_t level)
const;
5041 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5042 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5046 void AddToFreeListFront(uint32_t level, Node* node);
5050 void RemoveFromFreeList(uint32_t level, Node* node);
5052 #if VMA_STATS_STRING_ENABLED 5053 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// One VkDeviceMemory object allocated from a single memory type and carved
// into suballocations by the algorithm behind m_pMetadata. Mapping is
// reference-counted via m_MapCount.
5063 class VmaDeviceMemoryBlock
5065 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
// Metadata object managing this block's suballocation layout.
5067 VmaBlockMetadata* m_pMetadata;
5071 ~VmaDeviceMemoryBlock()
// Destroying a block that is still mapped, or that still owns its
// VkDeviceMemory handle, is a usage error caught by these asserts.
5073 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5074 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// (Init parameters; the declaration is partially elided by extraction):
// takes ownership of `newMemory` of `newSize` bytes from memory type
// `newMemoryTypeIndex`, selecting metadata `algorithm`.
5080 uint32_t newMemoryTypeIndex,
5081 VkDeviceMemory newMemory,
5082 VkDeviceSize newSize,
5084 uint32_t algorithm);
// Trivial accessors.
5088 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5089 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5090 uint32_t GetId()
const {
return m_Id; }
5091 void* GetMappedData()
const {
return m_pMappedData; }
// Validates internal consistency of the block and its metadata.
5094 bool Validate()
const;
// Maps the block `count` times (reference counted); *ppData receives the
// host-visible pointer.
5099 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption detection: write/check magic values around a suballocation.
5102 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5103 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
// Bind a buffer/image to this block's memory (parameter lists elided here).
5105 VkResult BindBufferMemory(
5109 VkResult BindImageMemory(
5115 uint32_t m_MemoryTypeIndex;
5117 VkDeviceMemory m_hMemory;
// Map reference counter and cached host pointer while mapped.
5122 uint32_t m_MapCount;
5123 void* m_pMappedData;
// Comparator for raw pointers, used to keep pointer-keyed collections
// sorted and binary-searchable.
5126 struct VmaPointerLess
5128 bool operator()(
const void* lhs,
const void* rhs)
// NOTE(review): the comparison body (orig ~5130) is missing from this
// extraction — presumably `return lhs < rhs;`; confirm upstream. The
// trailing fragment below is the forward declaration of VmaDefragmentator.
const 5134 class VmaDefragmentator;
5142 struct VmaBlockVector
5144 VMA_CLASS_NO_COPY(VmaBlockVector)
5148 uint32_t memoryTypeIndex,
5149 VkDeviceSize preferredBlockSize,
5150 size_t minBlockCount,
5151 size_t maxBlockCount,
5152 VkDeviceSize bufferImageGranularity,
5153 uint32_t frameInUseCount,
5155 bool explicitBlockSize,
5156 uint32_t algorithm);
5159 VkResult CreateMinBlocks();
5161 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5162 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5163 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5164 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5165 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5169 bool IsEmpty()
const {
return m_Blocks.empty(); }
5170 bool IsCorruptionDetectionEnabled()
const;
5174 uint32_t currentFrameIndex,
5176 VkDeviceSize alignment,
5178 VmaSuballocationType suballocType,
5187 #if VMA_STATS_STRING_ENABLED 5188 void PrintDetailedMap(
class VmaJsonWriter& json);
5191 void MakePoolAllocationsLost(
5192 uint32_t currentFrameIndex,
5193 size_t* pLostAllocationCount);
5194 VkResult CheckCorruption();
5196 VmaDefragmentator* EnsureDefragmentator(
5198 uint32_t currentFrameIndex);
5200 VkResult Defragment(
5202 VkDeviceSize& maxBytesToMove,
5203 uint32_t& maxAllocationsToMove);
5205 void DestroyDefragmentator();
5208 friend class VmaDefragmentator;
5211 const uint32_t m_MemoryTypeIndex;
5212 const VkDeviceSize m_PreferredBlockSize;
5213 const size_t m_MinBlockCount;
5214 const size_t m_MaxBlockCount;
5215 const VkDeviceSize m_BufferImageGranularity;
5216 const uint32_t m_FrameInUseCount;
5217 const bool m_IsCustomPool;
5218 const bool m_ExplicitBlockSize;
5219 const uint32_t m_Algorithm;
5220 bool m_HasEmptyBlock;
5223 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5227 VmaDefragmentator* m_pDefragmentator;
5228 uint32_t m_NextBlockId;
5230 VkDeviceSize CalcMaxBlockSize()
const;
5233 void Remove(VmaDeviceMemoryBlock* pBlock);
5237 void IncrementallySortBlocks();
5240 VkResult AllocateFromBlock(
5241 VmaDeviceMemoryBlock* pBlock,
5243 uint32_t currentFrameIndex,
5245 VkDeviceSize alignment,
5248 VmaSuballocationType suballocType,
5252 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5257 VMA_CLASS_NO_COPY(VmaPool_T)
5259 VmaBlockVector m_BlockVector;
5264 VkDeviceSize preferredBlockSize);
5267 uint32_t GetId()
const {
return m_Id; }
5268 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5270 #if VMA_STATS_STRING_ENABLED 5278 class VmaDefragmentator
5280 VMA_CLASS_NO_COPY(VmaDefragmentator)
5283 VmaBlockVector*
const m_pBlockVector;
5284 uint32_t m_CurrentFrameIndex;
5285 VkDeviceSize m_BytesMoved;
5286 uint32_t m_AllocationsMoved;
5288 struct AllocationInfo
5291 VkBool32* m_pChanged;
5294 m_hAllocation(VK_NULL_HANDLE),
5295 m_pChanged(VMA_NULL)
5300 struct AllocationInfoSizeGreater
5302 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5304 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5309 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5313 VmaDeviceMemoryBlock* m_pBlock;
5314 bool m_HasNonMovableAllocations;
5315 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5317 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5319 m_HasNonMovableAllocations(true),
5320 m_Allocations(pAllocationCallbacks),
5321 m_pMappedDataForDefragmentation(VMA_NULL)
5325 void CalcHasNonMovableAllocations()
5327 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5328 const size_t defragmentAllocCount = m_Allocations.size();
5329 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5332 void SortAllocationsBySizeDescecnding()
5334 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5337 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5342 void* m_pMappedDataForDefragmentation;
5345 struct BlockPointerLess
5347 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5349 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5351 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5353 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5359 struct BlockInfoCompareMoveDestination
5361 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5363 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5367 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5371 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5379 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5380 BlockInfoVector m_Blocks;
5382 VkResult DefragmentRound(
5383 VkDeviceSize maxBytesToMove,
5384 uint32_t maxAllocationsToMove);
5386 static bool MoveMakesSense(
5387 size_t dstBlockIndex, VkDeviceSize dstOffset,
5388 size_t srcBlockIndex, VkDeviceSize srcOffset);
5393 VmaBlockVector* pBlockVector,
5394 uint32_t currentFrameIndex);
5396 ~VmaDefragmentator();
5398 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5399 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5401 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5403 VkResult Defragment(
5404 VkDeviceSize maxBytesToMove,
5405 uint32_t maxAllocationsToMove);
5408 #if VMA_RECORDING_ENABLED 5415 void WriteConfiguration(
5416 const VkPhysicalDeviceProperties& devProps,
5417 const VkPhysicalDeviceMemoryProperties& memProps,
5418 bool dedicatedAllocationExtensionEnabled);
5421 void RecordCreateAllocator(uint32_t frameIndex);
5422 void RecordDestroyAllocator(uint32_t frameIndex);
5423 void RecordCreatePool(uint32_t frameIndex,
5426 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5427 void RecordAllocateMemory(uint32_t frameIndex,
5428 const VkMemoryRequirements& vkMemReq,
5431 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5432 const VkMemoryRequirements& vkMemReq,
5433 bool requiresDedicatedAllocation,
5434 bool prefersDedicatedAllocation,
5437 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5438 const VkMemoryRequirements& vkMemReq,
5439 bool requiresDedicatedAllocation,
5440 bool prefersDedicatedAllocation,
5443 void RecordFreeMemory(uint32_t frameIndex,
5445 void RecordSetAllocationUserData(uint32_t frameIndex,
5447 const void* pUserData);
5448 void RecordCreateLostAllocation(uint32_t frameIndex,
5450 void RecordMapMemory(uint32_t frameIndex,
5452 void RecordUnmapMemory(uint32_t frameIndex,
5454 void RecordFlushAllocation(uint32_t frameIndex,
5455 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5456 void RecordInvalidateAllocation(uint32_t frameIndex,
5457 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5458 void RecordCreateBuffer(uint32_t frameIndex,
5459 const VkBufferCreateInfo& bufCreateInfo,
5462 void RecordCreateImage(uint32_t frameIndex,
5463 const VkImageCreateInfo& imageCreateInfo,
5466 void RecordDestroyBuffer(uint32_t frameIndex,
5468 void RecordDestroyImage(uint32_t frameIndex,
5470 void RecordTouchAllocation(uint32_t frameIndex,
5472 void RecordGetAllocationInfo(uint32_t frameIndex,
5474 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5484 class UserDataString
5488 const char* GetString()
const {
return m_Str; }
5498 VMA_MUTEX m_FileMutex;
5500 int64_t m_StartCounter;
5502 void GetBasicParams(CallParams& outParams);
5506 #endif // #if VMA_RECORDING_ENABLED 5509 struct VmaAllocator_T
5511 VMA_CLASS_NO_COPY(VmaAllocator_T)
5514 bool m_UseKhrDedicatedAllocation;
5516 bool m_AllocationCallbacksSpecified;
5517 VkAllocationCallbacks m_AllocationCallbacks;
5521 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5522 VMA_MUTEX m_HeapSizeLimitMutex;
5524 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5525 VkPhysicalDeviceMemoryProperties m_MemProps;
5528 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5531 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5532 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5533 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5539 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5541 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5545 return m_VulkanFunctions;
5548 VkDeviceSize GetBufferImageGranularity()
const 5551 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5552 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5555 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5556 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5558 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5560 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5561 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5564 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5566 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5567 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5570 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5572 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5573 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5574 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5577 bool IsIntegratedGpu()
const 5579 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5582 #if VMA_RECORDING_ENABLED 5583 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5586 void GetBufferMemoryRequirements(
5588 VkMemoryRequirements& memReq,
5589 bool& requiresDedicatedAllocation,
5590 bool& prefersDedicatedAllocation)
const;
5591 void GetImageMemoryRequirements(
5593 VkMemoryRequirements& memReq,
5594 bool& requiresDedicatedAllocation,
5595 bool& prefersDedicatedAllocation)
const;
5598 VkResult AllocateMemory(
5599 const VkMemoryRequirements& vkMemReq,
5600 bool requiresDedicatedAllocation,
5601 bool prefersDedicatedAllocation,
5602 VkBuffer dedicatedBuffer,
5603 VkImage dedicatedImage,
5605 VmaSuballocationType suballocType,
5611 void CalculateStats(
VmaStats* pStats);
5613 #if VMA_STATS_STRING_ENABLED 5614 void PrintDetailedMap(
class VmaJsonWriter& json);
5617 VkResult Defragment(
5619 size_t allocationCount,
5620 VkBool32* pAllocationsChanged,
5628 void DestroyPool(
VmaPool pool);
5631 void SetCurrentFrameIndex(uint32_t frameIndex);
5632 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5634 void MakePoolAllocationsLost(
5636 size_t* pLostAllocationCount);
5637 VkResult CheckPoolCorruption(
VmaPool hPool);
5638 VkResult CheckCorruption(uint32_t memoryTypeBits);
5642 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5643 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5648 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5649 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5651 void FlushOrInvalidateAllocation(
5653 VkDeviceSize offset, VkDeviceSize size,
5654 VMA_CACHE_OPERATION op);
5656 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5659 VkDeviceSize m_PreferredLargeHeapBlockSize;
5661 VkPhysicalDevice m_PhysicalDevice;
5662 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5664 VMA_MUTEX m_PoolsMutex;
5666 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5667 uint32_t m_NextPoolId;
5671 #if VMA_RECORDING_ENABLED 5672 VmaRecorder* m_pRecorder;
5677 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5679 VkResult AllocateMemoryOfType(
5681 VkDeviceSize alignment,
5682 bool dedicatedAllocation,
5683 VkBuffer dedicatedBuffer,
5684 VkImage dedicatedImage,
5686 uint32_t memTypeIndex,
5687 VmaSuballocationType suballocType,
5691 VkResult AllocateDedicatedMemory(
5693 VmaSuballocationType suballocType,
5694 uint32_t memTypeIndex,
5696 bool isUserDataString,
5698 VkBuffer dedicatedBuffer,
5699 VkImage dedicatedImage,
// Allocates `size` bytes with `alignment` through the allocator's stored
// VkAllocationCallbacks (overload forwarding to the callbacks-based VmaMalloc).
5709 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5711 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
// Frees memory obtained from VmaMalloc(hAllocator, ...).
5714 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5716 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation.
// NOTE(review): the function header (orig ~5720-5721, presumably vma_new)
// is elided by this extraction.
5719 template<
typename T>
5722 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation of `count` elements.
// NOTE(review): no overflow check on sizeof(T) * count — matches upstream.
5725 template<
typename T>
5726 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5728 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys and frees a single object.
// NOTE(review): the explicit destructor call (orig ~5734-5736) is elided here.
5731 template<
typename T>
5732 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5737 VmaFree(hAllocator, ptr);
// Destroys and frees an array, iterating indices in reverse.
// NOTE(review): the per-element destructor call inside the loop is elided.
5741 template<
typename T>
5742 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5746 for(
size_t i = count; i--; )
5748 VmaFree(hAllocator, ptr);
5755 #if VMA_STATS_STRING_ENABLED 5757 class VmaStringBuilder
5760 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5761 size_t GetLength()
const {
return m_Data.size(); }
5762 const char* GetData()
const {
return m_Data.data(); }
5764 void Add(
char ch) { m_Data.push_back(ch); }
5765 void Add(
const char* pStr);
5766 void AddNewLine() { Add(
'\n'); }
5767 void AddNumber(uint32_t num);
5768 void AddNumber(uint64_t num);
5769 void AddPointer(
const void* ptr);
5772 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends the NUL-terminated string `pStr` to the growable char buffer
// (no terminator is stored; m_Data is a raw char vector).
5775 void VmaStringBuilder::Add(
const char* pStr)
5777 const size_t strLen = strlen(pStr);
// NOTE(review): the `if(strLen > 0)` guard (orig ~5778-5779) is elided by
// this extraction; confirm upstream.
5780 const size_t oldCount = m_Data.size();
5781 m_Data.resize(oldCount + strLen);
5782 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Formats a 32-bit number into a stack buffer, then appends it.
// NOTE(review): the local `char buf[...]` declaration and the trailing
// Add(buf) call are elided by this extraction.
5786 void VmaStringBuilder::AddNumber(uint32_t num)
5789 VmaUint32ToStr(buf,
sizeof(buf), num);
// 64-bit variant of the above.
5793 void VmaStringBuilder::AddNumber(uint64_t num)
5796 VmaUint64ToStr(buf,
sizeof(buf), num);
// Appends a pointer value formatted as text.
5800 void VmaStringBuilder::AddPointer(
const void* ptr)
5803 VmaPtrToStr(buf,
sizeof(buf), ptr);
5807 #endif // #if VMA_STATS_STRING_ENABLED 5812 #if VMA_STATS_STRING_ENABLED 5816 VMA_CLASS_NO_COPY(VmaJsonWriter)
5818 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5821 void BeginObject(
bool singleLine =
false);
5824 void BeginArray(
bool singleLine =
false);
5827 void WriteString(
const char* pStr);
5828 void BeginString(
const char* pStr = VMA_NULL);
5829 void ContinueString(
const char* pStr);
5830 void ContinueString(uint32_t n);
5831 void ContinueString(uint64_t n);
5832 void ContinueString_Pointer(
const void* ptr);
5833 void EndString(
const char* pStr = VMA_NULL);
5835 void WriteNumber(uint32_t n);
5836 void WriteNumber(uint64_t n);
5837 void WriteBool(
bool b);
5841 static const char*
const INDENT;
5843 enum COLLECTION_TYPE
5845 COLLECTION_TYPE_OBJECT,
5846 COLLECTION_TYPE_ARRAY,
5850 COLLECTION_TYPE type;
5851 uint32_t valueCount;
5852 bool singleLineMode;
5855 VmaStringBuilder& m_SB;
5856 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5857 bool m_InsideString;
5859 void BeginValue(
bool isString);
5860 void WriteIndent(
bool oneLess =
false);
5863 const char*
const VmaJsonWriter::INDENT =
" ";
// Streams hand-built JSON into a VmaStringBuilder; m_Stack tracks the open
// objects/arrays for indentation and structural validation.
5865 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5867 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5868 m_InsideString(false)
// All strings must be ended and all collections closed before destruction.
5872 VmaJsonWriter::~VmaJsonWriter()
5874 VMA_ASSERT(!m_InsideString);
5875 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object; singleLine suppresses pretty-printing line breaks.
// NOTE(review): emission of the '{' character and the `StackItem item;`
// local declaration (orig ~5882-5885) are elided by this extraction.
5878 void VmaJsonWriter::BeginObject(
bool singleLine)
5880 VMA_ASSERT(!m_InsideString);
5886 item.type = COLLECTION_TYPE_OBJECT;
5887 item.valueCount = 0;
5888 item.singleLineMode = singleLine;
5889 m_Stack.push_back(item);
// Closes the innermost collection, which must be an object.
5892 void VmaJsonWriter::EndObject()
5894 VMA_ASSERT(!m_InsideString);
5899 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array; mirrors BeginObject.
5903 void VmaJsonWriter::BeginArray(
bool singleLine)
5905 VMA_ASSERT(!m_InsideString);
5911 item.type = COLLECTION_TYPE_ARRAY;
5912 item.valueCount = 0;
5913 item.singleLineMode = singleLine;
5914 m_Stack.push_back(item);
// Closes the innermost collection, which must be an array.
5917 void VmaJsonWriter::EndArray()
5919 VMA_ASSERT(!m_InsideString);
5924 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
5928 void VmaJsonWriter::WriteString(
const char* pStr)
5934 void VmaJsonWriter::BeginString(
const char* pStr)
5936 VMA_ASSERT(!m_InsideString);
5940 m_InsideString =
true;
5941 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5943 ContinueString(pStr);
5947 void VmaJsonWriter::ContinueString(
const char* pStr)
5949 VMA_ASSERT(m_InsideString);
5951 const size_t strLen = strlen(pStr);
5952 for(
size_t i = 0; i < strLen; ++i)
5985 VMA_ASSERT(0 &&
"Character not currently supported.");
5991 void VmaJsonWriter::ContinueString(uint32_t n)
5993 VMA_ASSERT(m_InsideString);
5997 void VmaJsonWriter::ContinueString(uint64_t n)
5999 VMA_ASSERT(m_InsideString);
6003 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6005 VMA_ASSERT(m_InsideString);
6006 m_SB.AddPointer(ptr);
6009 void VmaJsonWriter::EndString(
const char* pStr)
6011 VMA_ASSERT(m_InsideString);
6012 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6014 ContinueString(pStr);
6017 m_InsideString =
false;
6020 void VmaJsonWriter::WriteNumber(uint32_t n)
6022 VMA_ASSERT(!m_InsideString);
6027 void VmaJsonWriter::WriteNumber(uint64_t n)
6029 VMA_ASSERT(!m_InsideString);
6034 void VmaJsonWriter::WriteBool(
bool b)
6036 VMA_ASSERT(!m_InsideString);
6038 m_SB.Add(b ?
"true" :
"false");
6041 void VmaJsonWriter::WriteNull()
6043 VMA_ASSERT(!m_InsideString);
6048 void VmaJsonWriter::BeginValue(
bool isString)
6050 if(!m_Stack.empty())
6052 StackItem& currItem = m_Stack.back();
6053 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6054 currItem.valueCount % 2 == 0)
6056 VMA_ASSERT(isString);
6059 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6060 currItem.valueCount % 2 != 0)
6064 else if(currItem.valueCount > 0)
6073 ++currItem.valueCount;
6077 void VmaJsonWriter::WriteIndent(
bool oneLess)
6079 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6083 size_t count = m_Stack.size();
6084 if(count > 0 && oneLess)
6088 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's user data. When the allocation was created with the
// "user data is a string" flag, the incoming pointer is treated as a C string:
// the old copy is freed and the new string is duplicated with the allocator's
// callbacks. Otherwise the raw pointer is stored as-is.
// NOTE(review): braces and the `else` keyword around the two branches were
// elided by the extraction; code left byte-identical.
6095 #endif // #if VMA_STATS_STRING_ENABLED 6099 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6101 if(IsUserDataString())
6103 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6105 FreeUserDataString(hAllocator);
6107 if(pUserData != VMA_NULL)
// Duplicate the string (including the terminating NUL).
6109 const char*
const newStrSrc = (
char*)pUserData;
6110 const size_t newStrLen = strlen(newStrSrc);
6111 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6112 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6113 m_pUserData = newStrDst;
// Non-string mode: store the opaque pointer directly.
6118 m_pUserData = pUserData;
// Moves this block-type allocation to a different device memory block/offset
// (used by defragmentation). If the allocation is persistently mapped, the
// mapping reference count is transferred: unmap the old block, map the new one.
6122 void VmaAllocation_T::ChangeBlockAllocation(
6124 VmaDeviceMemoryBlock* block,
6125 VkDeviceSize offset)
6127 VMA_ASSERT(block != VMA_NULL);
6128 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6131 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag; the remainder is the actual map count.
6133 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6134 if(IsPersistentMap())
6136 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6137 block->Map(hAllocator, mapRefCount, VMA_NULL);
6140 m_BlockAllocation.m_Block = block;
6141 m_BlockAllocation.m_Offset = offset;
// Accessors that dispatch on the allocation type (block vs. dedicated).
// NOTE(review): the switch headers and default branches were elided by the
// extraction; only the visible case arms remain. Code left byte-identical.
// Offset within the owning block; dedicated allocations start at 0 (arm elided).
6144 VkDeviceSize VmaAllocation_T::GetOffset()
const 6148 case ALLOCATION_TYPE_BLOCK:
6149 return m_BlockAllocation.m_Offset;
6150 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle.
6158 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6162 case ALLOCATION_TYPE_BLOCK:
6163 return m_BlockAllocation.m_Block->GetDeviceMemory();
6164 case ALLOCATION_TYPE_DEDICATED:
6165 return m_DedicatedAllocation.m_hMemory;
6168 return VK_NULL_HANDLE;
// Vulkan memory type index this allocation lives in.
6172 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6176 case ALLOCATION_TYPE_BLOCK:
6177 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6178 case ALLOCATION_TYPE_DEDICATED:
6179 return m_DedicatedAllocation.m_MemoryTypeIndex;
// CPU-visible pointer if mapped: block base pointer plus this allocation's offset.
6186 void* VmaAllocation_T::GetMappedData()
const 6190 case ALLOCATION_TYPE_BLOCK:
6193 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6194 VMA_ASSERT(pBlockData != VMA_NULL);
6195 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6202 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer must agree with the map count (both set or both clear).
6203 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6204 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can be marked lost; dedicated never can (arm elided).
6211 bool VmaAllocation_T::CanBecomeLost()
const 6215 case ALLOCATION_TYPE_BLOCK:
6216 return m_BlockAllocation.m_CanBecomeLost;
6217 case ALLOCATION_TYPE_DEDICATED:
6225 VmaPool VmaAllocation_T::GetPool()
const 6227 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6228 return m_BlockAllocation.m_hPool;
// Atomically tries to mark the allocation lost via compare-exchange on its
// last-use frame index. Fails if already lost or still within frameInUseCount.
6231 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6233 VMA_ASSERT(CanBecomeLost());
6239 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6242 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Recently used: cannot be reclaimed yet.
6247 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6253 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for VmaSuballocationType, indexed by enum value
// (initializer entries elided by the extraction).
6263 #if VMA_STATS_STRING_ENABLED 6266 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emits this allocation's properties as JSON key/value pairs into an
// already-open object: type, size, optional user data, frame indices, usage.
6275 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6277 json.WriteString(
"Type");
6278 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6280 json.WriteString(
"Size");
6281 json.WriteNumber(m_Size);
6283 if(m_pUserData != VMA_NULL)
6285 json.WriteString(
"UserData");
// String user data is written verbatim; opaque pointers as a pointer string.
6286 if(IsUserDataString())
6288 json.WriteString((
const char*)m_pUserData);
6293 json.ContinueString_Pointer(m_pUserData);
6298 json.WriteString(
"CreationFrameIndex");
6299 json.WriteNumber(m_CreationFrameIndex);
6301 json.WriteString(
"LastUseFrameIndex");
6302 json.WriteNumber(GetLastUseFrameIndex());
6304 if(m_BufferImageUsage != 0)
6306 json.WriteString(
"Usage");
6307 json.WriteNumber(m_BufferImageUsage);
// Frees the duplicated user-data string made by SetUserData (string mode only).
// The array length passed to vma_delete_array includes the terminating NUL.
6313 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6315 VMA_ASSERT(IsUserDataString());
6316 if(m_pUserData != VMA_NULL)
6318 char*
const oldStr = (
char*)m_pUserData;
6319 const size_t oldStrLen = strlen(oldStr);
6320 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6321 m_pUserData = VMA_NULL;
// Map/unmap reference counting. The low 7 bits of m_MapCount are the count
// (capped at 0x7F); the high bit flags a persistent mapping.
// NOTE(review): increment/decrement statements between the visible lines were
// elided by the extraction; code left byte-identical.
6325 void VmaAllocation_T::BlockAllocMap()
6327 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6329 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6335 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
6339 void VmaAllocation_T::BlockAllocUnmap()
6341 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6343 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6349 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, returns the cached pointer
// and bumps the count; otherwise calls vkMapMemory and caches the result.
6353 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6355 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6359 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6361 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6362 *ppData = m_DedicatedAllocation.m_pMappedData;
6368 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6369 return VK_ERROR_MEMORY_MAP_FAILED;
6374 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6375 hAllocator->m_hDevice,
6376 m_DedicatedAllocation.m_hMemory,
6381 if(result == VK_SUCCESS)
6383 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation when the count drops to zero.
6390 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6392 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6394 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6399 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6400 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6401 hAllocator->m_hDevice,
6402 m_DedicatedAllocation.m_hMemory);
6407 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object (the WriteNumber calls reading
// the stat fields were elided by the extraction; only the keys remain visible).
6411 #if VMA_STATS_STRING_ENABLED 6413 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6417 json.WriteString(
"Blocks");
6420 json.WriteString(
"Allocations");
6423 json.WriteString(
"UnusedRanges");
6426 json.WriteString(
"UsedBytes");
6429 json.WriteString(
"UnusedBytes");
// Nested single-line object with min/avg/max of allocation sizes.
6434 json.WriteString(
"AllocationSize");
6435 json.BeginObject(
true);
6436 json.WriteString(
"Min");
6438 json.WriteString(
"Avg");
6440 json.WriteString(
"Max");
// Nested single-line object with min/avg/max of unused range sizes.
6447 json.WriteString(
"UnusedRangeSize");
6448 json.BeginObject(
true);
6449 json.WriteString(
"Min");
6451 json.WriteString(
"Avg");
6453 json.WriteString(
"Max");
// Comparator ordering free-suballocation iterators by ascending size; the
// heterogeneous overload lets binary search compare against a raw VkDeviceSize.
6461 #endif // #if VMA_STATS_STRING_ENABLED 6463 struct VmaSuballocationItemSizeLess
6466 const VmaSuballocationList::iterator lhs,
6467 const VmaSuballocationList::iterator rhs)
const 6469 return lhs->size < rhs->size;
6472 const VmaSuballocationList::iterator lhs,
6473 VkDeviceSize rhsSize)
const 6475 return lhs->size < rhsSize;
// Shared JSON helpers for the per-block detailed map: Begin writes the block
// summary and opens the "Suballocations" array; _Allocation/_UnusedRange emit
// one array element each; _End (body elided) closes the array/object.
6483 #if VMA_STATS_STRING_ENABLED 6485 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6486 VkDeviceSize unusedBytes,
6487 size_t allocationCount,
6488 size_t unusedRangeCount)
const 6492 json.WriteString(
"TotalBytes");
6493 json.WriteNumber(GetSize());
6495 json.WriteString(
"UnusedBytes");
6496 json.WriteNumber(unusedBytes);
6498 json.WriteString(
"Allocations");
6499 json.WriteNumber((uint64_t)allocationCount);
6501 json.WriteString(
"UnusedRanges");
6502 json.WriteNumber((uint64_t)unusedRangeCount);
6504 json.WriteString(
"Suballocations");
// One used suballocation: offset plus the allocation's own parameters.
6508 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6509 VkDeviceSize offset,
6512 json.BeginObject(
true);
6514 json.WriteString(
"Offset");
6515 json.WriteNumber(offset);
6517 hAllocation->PrintParameters(json);
// One free range: offset, the FREE type name, and its size.
6522 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6523 VkDeviceSize offset,
6524 VkDeviceSize size)
const 6526 json.BeginObject(
true);
6528 json.WriteString(
"Offset");
6529 json.WriteNumber(offset);
6531 json.WriteString(
"Type");
6532 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6534 json.WriteString(
"Size");
6535 json.WriteNumber(size);
// Closes the structures opened by PrintDetailedMap_Begin (body elided).
6540 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// VmaBlockMetadata_Generic: free-list metadata. The constructor wires both
// containers to the allocator's callbacks; Init seeds the block with a single
// free suballocation spanning the whole size and registers it by size.
const 6546 #endif // #if VMA_STATS_STRING_ENABLED 6551 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6554 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6555 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6559 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
6563 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6565 VmaBlockMetadata::Init(size);
6567 m_SumFreeSize = size;
// The initial state: one free range covering [0, size).
6569 VmaSuballocation suballoc = {};
6570 suballoc.offset = 0;
6571 suballoc.size = size;
6572 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6573 suballoc.hAllocation = VK_NULL_HANDLE;
6575 m_Suballocations.push_back(suballoc);
// NOTE(review): a decrement of the end iterator appears to be elided here
// (line 6577) — suballocItem presumably points at the element just pushed.
6576 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6578 m_FreeSuballocationsBySize.push_back(suballocItem);
// Consistency check: walks all suballocations verifying contiguity, that no
// two free ranges are adjacent, that cached free counts/sizes match, and that
// the by-size vector is sorted and registers exactly the large-enough frees.
// NOTE(review): many `return false` / brace lines were elided; byte-identical.
6581 bool VmaBlockMetadata_Generic::Validate()
const 6583 if(m_Suballocations.empty())
// Expected running offset; each suballocation must start exactly here.
6589 VkDeviceSize calculatedOffset = 0;
6591 uint32_t calculatedFreeCount = 0;
6593 VkDeviceSize calculatedSumFreeSize = 0;
// How many free ranges are big enough to appear in m_FreeSuballocationsBySize.
6596 size_t freeSuballocationsToRegister = 0;
6598 bool prevFree =
false;
6600 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6601 suballocItem != m_Suballocations.cend();
6604 const VmaSuballocation& subAlloc = *suballocItem;
6607 if(subAlloc.offset != calculatedOffset)
6612 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged — invalid if both free.
6614 if(prevFree && currFree)
// Free <=> no allocation handle; mismatch is invalid.
6619 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
6626 calculatedSumFreeSize += subAlloc.size;
6627 ++calculatedFreeCount;
6628 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6630 ++freeSuballocationsToRegister;
// Free ranges must at least hold the debug margin.
6634 if(subAlloc.size < VMA_DEBUG_MARGIN)
// Used ranges must agree with their allocation object's offset/size.
6641 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
6645 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
// With a debug margin every used range must be preceded by a free one.
6651 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
6657 calculatedOffset += subAlloc.size;
6658 prevFree = currFree;
6663 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size vector must be sorted ascending and contain only free items.
6668 VkDeviceSize lastSize = 0;
6669 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6671 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6674 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
6679 if(suballocItem->size < lastSize)
6684 lastSize = suballocItem->size;
// Final cross-checks against the cached aggregate counters.
6688 if(!ValidateFreeSuballocationList() ||
6689 (calculatedOffset != GetSize()) ||
6690 (calculatedSumFreeSize != m_SumFreeSize) ||
6691 (calculatedFreeCount != m_FreeCount))
// Largest free range: the by-size vector is sorted, so it is the last entry.
6699 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6701 if(!m_FreeSuballocationsBySize.empty())
6703 return m_FreeSuballocationsBySize.back()->size;
// Empty means exactly the single all-covering free range from Init().
6711 bool VmaBlockMetadata_Generic::IsEmpty()
const 6713 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Statistics and JSON reporting for one generic block.
// NOTE(review): the accumulation statements inside the loops were elided by
// the extraction; code left byte-identical.
// Fills a VmaStatInfo by iterating every suballocation.
6716 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6720 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6732 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6733 suballocItem != m_Suballocations.cend();
6736 const VmaSuballocation& suballoc = *suballocItem;
6737 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into pool-level statistics.
6750 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6752 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6754 inoutStats.
size += GetSize();
// Emits the full suballocation map as JSON (used ranges + free ranges).
6761 #if VMA_STATS_STRING_ENABLED 6763 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6765 PrintDetailedMap_Begin(json,
6767 m_Suballocations.size() - (size_t)m_FreeCount,
6771 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6772 suballocItem != m_Suballocations.cend();
6773 ++suballocItem, ++i)
6775 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6777 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6781 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6785 PrintDetailedMap_End(json);
// Finds space for a new allocation. First tries the sorted free list with a
// binary search (best-fit ascending, or worst-fit scanning downward in the
// elided alternate strategy); if canMakeOtherLost, additionally scans every
// suballocation for a candidate that minimizes the cost of evicting lost-able
// allocations. Returns true and fills *pAllocationRequest on success.
// NOTE(review): several call sites/branches were elided; byte-identical.
6788 #endif // #if VMA_STATS_STRING_ENABLED 6790 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6791 uint32_t currentFrameIndex,
6792 uint32_t frameInUseCount,
6793 VkDeviceSize bufferImageGranularity,
6794 VkDeviceSize allocSize,
6795 VkDeviceSize allocAlignment,
6797 VmaSuballocationType allocType,
6798 bool canMakeOtherLost,
6800 VmaAllocationRequest* pAllocationRequest)
6802 VMA_ASSERT(allocSize > 0);
6803 VMA_ASSERT(!upperAddress);
6804 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6805 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6806 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must fit size + both margins.
6809 if(canMakeOtherLost ==
false &&
6810 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6816 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6817 if(freeSuballocCount > 0)
// Best-fit: first free range not smaller than the padded request size.
6822 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6823 m_FreeSuballocationsBySize.data(),
6824 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6825 allocSize + 2 * VMA_DEBUG_MARGIN,
6826 VmaSuballocationItemSizeLess());
6827 size_t index = it - m_FreeSuballocationsBySize.data();
6828 for(; index < freeSuballocCount; ++index)
6833 bufferImageGranularity,
6837 m_FreeSuballocationsBySize[index],
6839 &pAllocationRequest->offset,
6840 &pAllocationRequest->itemsToMakeLostCount,
6841 &pAllocationRequest->sumFreeSize,
6842 &pAllocationRequest->sumItemSize))
6844 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate (elided condition, presumably worst-fit): scan largest-first.
6852 for(
size_t index = freeSuballocCount; index--; )
6857 bufferImageGranularity,
6861 m_FreeSuballocationsBySize[index],
6863 &pAllocationRequest->offset,
6864 &pAllocationRequest->itemsToMakeLostCount,
6865 &pAllocationRequest->sumFreeSize,
6866 &pAllocationRequest->sumItemSize))
6868 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force over all suballocations, keeping the cheapest
// request (CalcCost weighs how much must be made lost).
6875 if(canMakeOtherLost)
6879 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6880 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6882 VmaAllocationRequest tmpAllocRequest = {};
6883 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6884 suballocIt != m_Suballocations.end();
6887 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6888 suballocIt->hAllocation->CanBecomeLost())
6893 bufferImageGranularity,
6899 &tmpAllocRequest.offset,
6900 &tmpAllocRequest.itemsToMakeLostCount,
6901 &tmpAllocRequest.sumFreeSize,
6902 &tmpAllocRequest.sumItemSize))
6904 tmpAllocRequest.item = suballocIt;
6906 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
6909 *pAllocationRequest = tmpAllocRequest;
// A candidate was recorded iff sumItemSize left its sentinel value.
6915 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts the allocations a request flagged for loss. The request's item is
// re-pointed as frees merge; fails if an eviction is no longer possible.
6924 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
6925 uint32_t currentFrameIndex,
6926 uint32_t frameInUseCount,
6927 VmaAllocationRequest* pAllocationRequest)
6929 while(pAllocationRequest->itemsToMakeLostCount > 0)
6931 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6933 ++pAllocationRequest->item;
6935 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6936 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6937 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6938 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns the (possibly merged) free item to continue from.
6940 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6941 --pAllocationRequest->itemsToMakeLostCount;
6949 VMA_HEAVY_ASSERT(Validate());
6950 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6951 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can be; returns how many.
6956 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6958 uint32_t lostAllocationCount = 0;
6959 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6960 it != m_Suballocations.end();
6963 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6964 it->hAllocation->CanBecomeLost() &&
6965 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6967 it = FreeSuballocation(it);
6968 ++lostAllocationCount;
6971 return lostAllocationCount;
// Scans every used suballocation and validates the magic values written into
// the debug margins immediately before and after it. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin.
6974 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
6976 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6977 it != m_Suballocations.end();
6980 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin preceding the allocation.
6982 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6984 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6985 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin following the allocation.
6987 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6989 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6990 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the target free
// suballocation into a used one and re-inserts any leftover space before/after
// it as new free suballocations, updating the cached free count and size.
// NOTE(review): some counter-update branches near the end were elided.
6998 void VmaBlockMetadata_Generic::Alloc(
6999 const VmaAllocationRequest& request,
7000 VmaSuballocationType type,
7001 VkDeviceSize allocSize,
7005 VMA_ASSERT(!upperAddress);
7006 VMA_ASSERT(request.item != m_Suballocations.end());
7007 VmaSuballocation& suballoc = *request.item;
7009 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7011 VMA_ASSERT(request.offset >= suballoc.offset);
// Space left in the free range before/after the committed region.
7012 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7013 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7014 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free list before being rewritten as used.
7018 UnregisterFreeSuballocation(request.item);
7020 suballoc.offset = request.offset;
7021 suballoc.size = allocSize;
7022 suballoc.type = type;
7023 suballoc.hAllocation = hAllocation;
// Leftover space after the allocation becomes a new free suballocation.
7028 VmaSuballocation paddingSuballoc = {};
7029 paddingSuballoc.offset = request.offset + allocSize;
7030 paddingSuballoc.size = paddingEnd;
7031 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7032 VmaSuballocationList::iterator next = request.item;
7034 const VmaSuballocationList::iterator paddingEndItem =
7035 m_Suballocations.insert(next, paddingSuballoc);
7036 RegisterFreeSuballocation(paddingEndItem);
// Leftover space before the allocation likewise.
7042 VmaSuballocation paddingSuballoc = {};
7043 paddingSuballoc.offset = request.offset - paddingBegin;
7044 paddingSuballoc.size = paddingBegin;
7045 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7046 const VmaSuballocationList::iterator paddingBeginItem =
7047 m_Suballocations.insert(request.item, paddingSuballoc);
7048 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; increments for the padding ranges were elided.
7052 m_FreeCount = m_FreeCount - 1;
7053 if(paddingBegin > 0)
7061 m_SumFreeSize -= allocSize;
// Frees by allocation handle: linear search for the matching suballocation,
// then FreeSuballocation merges it with free neighbors. Asserts if not found.
7064 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7066 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7067 suballocItem != m_Suballocations.end();
7070 VmaSuballocation& suballoc = *suballocItem;
7071 if(suballoc.hAllocation == allocation)
7073 FreeSuballocation(suballocItem);
7074 VMA_HEAVY_ASSERT(Validate());
7078 VMA_ASSERT(0 &&
"Not found!");
// Same as Free but keyed by byte offset instead of handle.
7081 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7083 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7084 suballocItem != m_Suballocations.end();
7087 VmaSuballocation& suballoc = *suballocItem;
7088 if(suballoc.offset == offset)
7090 FreeSuballocation(suballocItem);
7094 VMA_ASSERT(0 &&
"Not found!");
// Verifies invariants of m_FreeSuballocationsBySize: every entry is free,
// at least the registration threshold in size, and sorted ascending.
// NOTE(review): the `return false` branches were elided by the extraction.
7097 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7099 VkDeviceSize lastSize = 0;
7100 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7102 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7104 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7109 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7114 if(it->size < lastSize)
7120 lastSize = it->size;
// Tests whether a request of allocSize/allocAlignment/allocType can be placed
// starting at suballocItem. Two code paths: (1) canMakeOtherLost — the request
// may span multiple successive suballocations, counting lost-able allocations
// it would evict (itemsToMakeLostCount) and tallying free vs. item bytes;
// (2) simple — the request must fit entirely inside one free suballocation.
// Both paths apply the debug margin, alignment, and bufferImageGranularity
// conflict checks against the previous and next suballocations on the same
// granularity page. Returns true with *pOffset set on success.
// NOTE(review): many early-return/brace lines were elided; byte-identical.
7125 bool VmaBlockMetadata_Generic::CheckAllocation(
7126 uint32_t currentFrameIndex,
7127 uint32_t frameInUseCount,
7128 VkDeviceSize bufferImageGranularity,
7129 VkDeviceSize allocSize,
7130 VkDeviceSize allocAlignment,
7131 VmaSuballocationType allocType,
7132 VmaSuballocationList::const_iterator suballocItem,
7133 bool canMakeOtherLost,
7134 VkDeviceSize* pOffset,
7135 size_t* itemsToMakeLostCount,
7136 VkDeviceSize* pSumFreeSize,
7137 VkDeviceSize* pSumItemSize)
const 7139 VMA_ASSERT(allocSize > 0);
7140 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7141 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7142 VMA_ASSERT(pOffset != VMA_NULL);
7144 *itemsToMakeLostCount = 0;
7148 if(canMakeOtherLost)
// Path 1: starting item may be free or an evictable used allocation.
7150 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7152 *pSumFreeSize = suballocItem->size;
7156 if(suballocItem->hAllocation->CanBecomeLost() &&
7157 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7159 ++*itemsToMakeLostCount;
7160 *pSumItemSize = suballocItem->size;
// Remaining space in the block from this offset must cover the request.
7169 if(GetSize() - suballocItem->offset < allocSize)
7175 *pOffset = suballocItem->offset;
// Reserve the leading debug margin, then align the start.
7178 if(VMA_DEBUG_MARGIN > 0)
7180 *pOffset += VMA_DEBUG_MARGIN;
7184 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Granularity: if a conflicting previous allocation shares the page,
// bump the offset up to the next granularity boundary.
7188 if(bufferImageGranularity > 1)
7190 bool bufferImageGranularityConflict =
false;
7191 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7192 while(prevSuballocItem != m_Suballocations.cbegin())
7195 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7196 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7198 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7200 bufferImageGranularityConflict =
true;
7208 if(bufferImageGranularityConflict)
7210 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
;
7216 if(*pOffset >= suballocItem->offset + suballocItem->size)
7222 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7225 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7227 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7229 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many successive items as the request needs,
// accumulating free bytes and evictable-item bytes.
7236 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7237 if(totalSize > suballocItem->size)
7239 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7240 while(remainingSize > 0)
7243 if(lastSuballocItem == m_Suballocations.cend())
7247 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7249 *pSumFreeSize += lastSuballocItem->size;
7253 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7254 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7255 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7257 ++*itemsToMakeLostCount;
7258 *pSumItemSize += lastSuballocItem->size;
7265 remainingSize = (lastSuballocItem->size < remainingSize) ?
7266 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following allocations on the same page;
// conflicting ones must also be evictable.
7272 if(bufferImageGranularity > 1)
7274 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7276 while(nextSuballocItem != m_Suballocations.cend())
7278 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7279 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7281 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7283 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7284 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7285 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7287 ++*itemsToMakeLostCount;
// Path 2: no eviction allowed — the item itself must be free and big enough.
7306 const VmaSuballocation& suballoc = *suballocItem;
7307 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7309 *pSumFreeSize = suballoc.size;
7312 if(suballoc.size < allocSize)
7318 *pOffset = suballoc.offset;
7321 if(VMA_DEBUG_MARGIN > 0)
7323 *pOffset += VMA_DEBUG_MARGIN;
7327 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same previous-neighbor granularity adjustment as in path 1.
7331 if(bufferImageGranularity > 1)
7333 bool bufferImageGranularityConflict =
false;
7334 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7335 while(prevSuballocItem != m_Suballocations.cbegin())
7338 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7339 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7341 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7343 bufferImageGranularityConflict =
true;
7351 if(bufferImageGranularityConflict)
7353 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7358 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7361 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fits entirely within this single free range?
7364 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Next-neighbor granularity conflict means failure on this path (no eviction).
7371 if(bufferImageGranularity > 1)
7373 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7375 while(nextSuballocItem != m_Suballocations.cend())
7377 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7378 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7380 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a free suballocation with its (also free) successor, absorbing the
// successor's size and erasing it. Callers handle free-list bookkeeping.
// NOTE(review): the iterator increment and a free-count decrement between the
// visible lines were elided by the extraction.
7399 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7401 VMA_ASSERT(item != m_Suballocations.end());
7402 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7404 VmaSuballocationList::iterator nextItem = item;
7406 VMA_ASSERT(nextItem != m_Suballocations.end());
7407 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7409 item->size += nextItem->size;
7411 m_Suballocations.erase(nextItem);
// Converts a used suballocation back to free, coalesces it with free
// neighbors on either side, and returns the iterator of the resulting
// (possibly merged) free item after registering it by size.
7414 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7417 VmaSuballocation& suballoc = *suballocItem;
7418 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7419 suballoc.hAllocation = VK_NULL_HANDLE;
7423 m_SumFreeSize += suballoc.size;
// Decide merges with the next and previous items before mutating the list.
7426 bool mergeWithNext =
false;
7427 bool mergeWithPrev =
false;
7429 VmaSuballocationList::iterator nextItem = suballocItem;
7431 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7433 mergeWithNext =
true;
7436 VmaSuballocationList::iterator prevItem = suballocItem;
7437 if(suballocItem != m_Suballocations.begin())
7440 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7442 mergeWithPrev =
true;
// Neighbors leave the by-size list before merging, and the survivor rejoins.
7448 UnregisterFreeSuballocation(nextItem);
7449 MergeFreeWithNext(suballocItem);
7454 UnregisterFreeSuballocation(prevItem);
7455 MergeFreeWithNext(prevItem);
7456 RegisterFreeSuballocation(prevItem);
7461 RegisterFreeSuballocation(suballocItem);
7462 return suballocItem;
// Inserts a free suballocation into the sorted-by-size vector, but only when
// it meets the minimum registration size (small fragments are not tracked).
7466 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7468 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7469 VMA_ASSERT(item->size > 0);
7473 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7475 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7477 if(m_FreeSuballocationsBySize.empty())
7479 m_FreeSuballocationsBySize.push_back(item);
// Keeps the vector sorted so binary search stays valid.
7483 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a registered free suballocation: binary-search to the first entry of
// equal size, then scan the equal-size run for the exact iterator.
7491 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7493 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7494 VMA_ASSERT(item->size > 0);
7498 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7500 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7502 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7503 m_FreeSuballocationsBySize.data(),
7504 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7506 VmaSuballocationItemSizeLess());
7507 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7508 index < m_FreeSuballocationsBySize.size();
7511 if(m_FreeSuballocationsBySize[index] == item)
7513 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Past the equal-size run without a match — the item was never registered.
7516 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7518 VMA_ASSERT(0 &&
"Not found.");
// VmaBlockMetadata_Linear: linear/ring-buffer metadata. Two suballocation
// vectors are swapped as 1st/2nd (m_1stVectorIndex selects which); the null-
// item counters track lazily-removed entries. Init just records the size —
// the block starts with no suballocations and all space free.
7527 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7529 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7530 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7531 m_1stVectorIndex(0),
7532 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7533 m_1stNullItemsBeginCount(0),
7534 m_1stNullItemsMiddleCount(0),
7535 m_2ndNullItemsCount(0)
7539 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
7543 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7545 VmaBlockMetadata::Init(size);
7546 m_SumFreeSize = size;
// Consistency check of the linear-metadata invariants. Walks both vectors
// verifying offsets strictly increase (with VMA_DEBUG_MARGIN between
// neighbors) and that per-item state matches the null-item counters.
// NOTE(review): the early `return false;` lines and closing braces appear to
// have been dropped by extraction — each `if` below is a failure check.
7549 bool VmaBlockMetadata_Linear::Validate()
const 7551 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7552 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector must be empty exactly when the mode says SECOND_VECTOR_EMPTY.
7554 if(suballocations2nd.empty() != (m_2ndVectorMode == SECOND_VECTOR_EMPTY))
// A ring buffer cannot exist while the 1st vector is empty.
7558 if(suballocations1st.empty() && !suballocations2nd.empty() &&
7559 m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7563 if(!suballocations1st.empty())
// First non-trimmed item of the 1st vector must be a live allocation...
7566 if(suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
// ...and so must the last one (trailing nulls get trimmed, see CleanupAfterFree).
7571 if(suballocations1st.back().hAllocation == VK_NULL_HANDLE)
7576 if(!suballocations2nd.empty())
// Last item of the 2nd vector must be live too.
7579 if(suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
// Null-item counters can never exceed the vectors' sizes.
7585 if(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount > suballocations1st.size())
7589 if(m_2ndNullItemsCount > suballocations2nd.size())
7594 VkDeviceSize sumUsedSize = 0;
7595 const size_t suballoc1stCount = suballocations1st.size();
// Running minimum offset for the next item; starts past the debug margin.
7596 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: the 2nd vector holds allocations that wrapped to the
// start of the block, so walk it first in ascending offset order.
7598 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7600 const size_t suballoc2ndCount = suballocations2nd.size();
7601 size_t nullItem2ndCount = 0;
7602 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7604 const VmaSuballocation& suballoc = suballocations2nd[i];
7605 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free flag and null handle must agree.
7607 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7611 if(suballoc.offset < offset)
// Stored allocation must agree with this item's offset and size.
7618 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7622 if(suballoc.hAllocation->GetSize() != suballoc.size)
7626 sumUsedSize += suballoc.size;
7633 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7636 if(nullItem2ndCount != m_2ndNullItemsCount)
// All items before m_1stNullItemsBeginCount must be free null items.
7642 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7644 const VmaSuballocation& suballoc = suballocations1st[i];
7645 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE ||
7646 suballoc.hAllocation != VK_NULL_HANDLE)
7652 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the live region of the 1st vector with the same per-item checks.
7654 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7656 const VmaSuballocation& suballoc = suballocations1st[i];
7657 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7659 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7663 if(suballoc.offset < offset)
7667 if(i < m_1stNullItemsBeginCount && !currFree)
7674 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7678 if(suballoc.hAllocation->GetSize() != suballoc.size)
7682 sumUsedSize += suballoc.size;
7689 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7691 if(nullItem1stCount != m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount)
// Double-stack mode: the 2nd vector grows downward from the end of the block,
// so iterate it in reverse to keep offsets ascending.
7696 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7698 const size_t suballoc2ndCount = suballocations2nd.size();
7699 size_t nullItem2ndCount = 0;
7700 for(
size_t i = suballoc2ndCount; i--; )
7702 const VmaSuballocation& suballoc = suballocations2nd[i];
7703 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7705 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7709 if(suballoc.offset < offset)
7716 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7720 if(suballoc.hAllocation->GetSize() != suballoc.size)
7724 sumUsedSize += suballoc.size;
7731 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7734 if(nullItem2ndCount != m_2ndNullItemsCount)
// Totals: nothing may extend past the block, and free bytes must balance.
7740 if(offset > GetSize())
7744 if(m_SumFreeSize != GetSize() - sumUsedSize)
7752 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7754 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7755 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, computed per
// second-vector mode from only the boundary suballocations.
// NOTE(review): some lines (e.g. the empty-block early return and a VMA_MAX
// wrapper around the two candidates in the EMPTY case) appear to have been
// dropped by extraction.
7758 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7760 const VkDeviceSize size = GetSize();
7772 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7774 switch(m_2ndVectorMode)
7776 case SECOND_VECTOR_EMPTY:
// Free space is either before the first live item or after the last one.
7782 const size_t suballocations1stCount = suballocations1st.size();
7783 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7784 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7785 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7787 firstSuballoc.offset,
7788 size - (lastSuballoc.offset + lastSuballoc.size);
// Ring buffer: the free gap lies between the end of the wrapped (2nd) run
// and the start of the 1st run.
7792 case SECOND_VECTOR_RING_BUFFER:
7797 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7798 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7799 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7800 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the free gap lies between the top of the lower (1st) stack
// and the bottom of the upper (2nd) stack.
7804 case SECOND_VECTOR_DOUBLE_STACK:
7809 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7810 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7811 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7812 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with allocation statistics by sweeping the whole block in
// address order: first the wrapped 2nd-vector run (ring-buffer mode), then
// the 1st vector, then the top (2nd) stack in double-stack mode. Gaps between
// consecutive live allocations are counted as unused ranges.
// NOTE(review): the lines that actually accumulate into outInfo appear to
// have been dropped by extraction; only the sweep skeleton is visible.
7822 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7824 const VkDeviceSize size = GetSize();
7825 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7826 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7827 const size_t suballoc1stCount = suballocations1st.size();
7828 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the previously visited allocation.
7839 VkDeviceSize lastOffset = 0;
7841 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// The wrapped run ends where the 1st vector's first live item begins.
7843 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7844 size_t nextAlloc2ndIndex = 0;
7845 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
7848 while(nextAlloc2ndIndex < suballoc2ndCount &&
7849 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7851 ++nextAlloc2ndIndex;
7855 if(nextAlloc2ndIndex < suballoc2ndCount)
7857 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
7860 if(lastOffset < suballoc.offset)
7863 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7877 lastOffset = suballoc.offset + suballoc.size;
7878 ++nextAlloc2ndIndex;
// No more live items: the remainder up to the 1st vector is unused.
7884 if(lastOffset < freeSpace2ndTo1stEnd)
7886 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7894 lastOffset = freeSpace2ndTo1stEnd;
// Sweep the 1st vector up to the block end (or the bottom of the 2nd stack).
7899 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7900 const VkDeviceSize freeSpace1stTo2ndEnd =
7901 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7902 while(lastOffset < freeSpace1stTo2ndEnd)
7905 while(nextAlloc1stIndex < suballoc1stCount &&
7906 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7908 ++nextAlloc1stIndex;
7912 if(nextAlloc1stIndex < suballoc1stCount)
7914 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7917 if(lastOffset < suballoc.offset)
7920 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7934 lastOffset = suballoc.offset + suballoc.size;
7935 ++nextAlloc1stIndex;
7941 if(lastOffset < freeSpace1stTo2ndEnd)
7943 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7951 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: walk the 2nd vector in reverse (descending index equals
// ascending address) up to the end of the block.
7955 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7957 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7958 while(lastOffset < size)
// SIZE_MAX marks index underflow, i.e. no items left.
7961 while(nextAlloc2ndIndex != SIZE_MAX &&
7962 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7964 --nextAlloc2ndIndex;
7968 if(nextAlloc2ndIndex != SIZE_MAX)
7970 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7973 if(lastOffset < suballoc.offset)
7976 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7990 lastOffset = suballoc.offset + suballoc.size;
7991 --nextAlloc2ndIndex;
7997 if(lastOffset < size)
7999 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats, using the same
// three-phase address-order sweep as CalcAllocationStatInfo: wrapped 2nd run,
// then 1st vector, then the top stack in double-stack mode.
// NOTE(review): the accumulation lines inside the loops appear to have been
// dropped by extraction; only the sweep skeleton is visible.
8015 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8017 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8018 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8019 const VkDeviceSize size = GetSize();
8020 const size_t suballoc1stCount = suballocations1st.size();
8021 const size_t suballoc2ndCount = suballocations2nd.size();
// The whole block contributes to the pool's total size.
8023 inoutStats.
size += size;
8025 VkDeviceSize lastOffset = 0;
8027 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8029 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): starting the 2nd-vector scan at m_1stNullItemsBeginCount
// looks inconsistent with the other sweeps, which start it at 0 — verify
// against upstream (the counter belongs to the 1st vector).
8030 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8031 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
8034 while(nextAlloc2ndIndex < suballoc2ndCount &&
8035 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8037 ++nextAlloc2ndIndex;
8041 if(nextAlloc2ndIndex < suballoc2ndCount)
8043 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8046 if(lastOffset < suballoc.offset)
8049 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8060 lastOffset = suballoc.offset + suballoc.size;
8061 ++nextAlloc2ndIndex;
8066 if(lastOffset < freeSpace2ndTo1stEnd)
8069 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8076 lastOffset = freeSpace2ndTo1stEnd;
// Sweep the 1st vector up to the block end (or bottom of the 2nd stack).
8081 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8082 const VkDeviceSize freeSpace1stTo2ndEnd =
8083 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8084 while(lastOffset < freeSpace1stTo2ndEnd)
8087 while(nextAlloc1stIndex < suballoc1stCount &&
8088 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8090 ++nextAlloc1stIndex;
8094 if(nextAlloc1stIndex < suballoc1stCount)
8096 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8099 if(lastOffset < suballoc.offset)
8102 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8113 lastOffset = suballoc.offset + suballoc.size;
8114 ++nextAlloc1stIndex;
8119 if(lastOffset < freeSpace1stTo2ndEnd)
8122 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8129 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack: reverse index order equals ascending address order.
8133 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8135 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8136 while(lastOffset < size)
8139 while(nextAlloc2ndIndex != SIZE_MAX &&
8140 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8142 --nextAlloc2ndIndex;
8146 if(nextAlloc2ndIndex != SIZE_MAX)
8148 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8151 if(lastOffset < suballoc.offset)
8154 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8165 lastOffset = suballoc.offset + suballoc.size;
8166 --nextAlloc2ndIndex;
8171 if(lastOffset < size)
8174 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this block. Two passes over the same
// three-phase address-order sweep: pass 1 counts allocations/unused ranges
// and used bytes for PrintDetailedMap_Begin; pass 2 emits each allocation
// and unused range in address order, then PrintDetailedMap_End.
#if VMA_STATS_STRING_ENABLED 8188 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8190 const VkDeviceSize size = GetSize();
8191 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8192 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8193 const size_t suballoc1stCount = suballocations1st.size();
8194 const size_t suballoc2ndCount = suballocations2nd.size();
// ---- Pass 1: count items and bytes ----
8198 size_t unusedRangeCount = 0;
8199 VkDeviceSize usedBytes = 0;
8201 VkDeviceSize lastOffset = 0;
8203 size_t alloc2ndCount = 0;
8204 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8206 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8207 size_t nextAlloc2ndIndex = 0;
8208 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
8211 while(nextAlloc2ndIndex < suballoc2ndCount &&
8212 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8214 ++nextAlloc2ndIndex;
8218 if(nextAlloc2ndIndex < suballoc2ndCount)
8220 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8223 if(lastOffset < suballoc.offset)
8232 usedBytes += suballoc.size;
8235 lastOffset = suballoc.offset + suballoc.size;
8236 ++nextAlloc2ndIndex;
8241 if(lastOffset < freeSpace2ndTo1stEnd)
8248 lastOffset = freeSpace2ndTo1stEnd;
8253 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8254 size_t alloc1stCount = 0;
8255 const VkDeviceSize freeSpace1stTo2ndEnd =
8256 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8257 while(lastOffset < freeSpace1stTo2ndEnd)
8260 while(nextAlloc1stIndex < suballoc1stCount &&
8261 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8263 ++nextAlloc1stIndex;
8267 if(nextAlloc1stIndex < suballoc1stCount)
8269 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8272 if(lastOffset < suballoc.offset)
8281 usedBytes += suballoc.size;
8284 lastOffset = suballoc.offset + suballoc.size;
8285 ++nextAlloc1stIndex;
8290 if(lastOffset < size)
8297 lastOffset = freeSpace1stTo2ndEnd;
8301 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8303 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8304 while(lastOffset < size)
// SIZE_MAX marks index underflow (no items left).
8307 while(nextAlloc2ndIndex != SIZE_MAX &&
8308 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8310 --nextAlloc2ndIndex;
8314 if(nextAlloc2ndIndex != SIZE_MAX)
8316 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8319 if(lastOffset < suballoc.offset)
8328 usedBytes += suballoc.size;
8331 lastOffset = suballoc.offset + suballoc.size;
8332 --nextAlloc2ndIndex;
8337 if(lastOffset < size)
8349 const VkDeviceSize unusedBytes = size - usedBytes;
8350 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// ---- Pass 2: emit each item in address order ----
8355 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8357 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8358 size_t nextAlloc2ndIndex = 0;
8359 while(lastOffset < freeSpace2ndTo1stEnd)
8362 while(nextAlloc2ndIndex < suballoc2ndCount &&
8363 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8365 ++nextAlloc2ndIndex;
8369 if(nextAlloc2ndIndex < suballoc2ndCount)
8371 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8374 if(lastOffset < suballoc.offset)
8377 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8378 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8383 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8386 lastOffset = suballoc.offset + suballoc.size;
8387 ++nextAlloc2ndIndex;
8392 if(lastOffset < freeSpace2ndTo1stEnd)
8395 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8396 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8400 lastOffset = freeSpace2ndTo1stEnd;
8405 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8406 while(lastOffset < freeSpace1stTo2ndEnd)
8409 while(nextAlloc1stIndex < suballoc1stCount &&
8410 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8412 ++nextAlloc1stIndex;
8416 if(nextAlloc1stIndex < suballoc1stCount)
8418 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8421 if(lastOffset < suballoc.offset)
8424 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8425 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8430 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8433 lastOffset = suballoc.offset + suballoc.size;
8434 ++nextAlloc1stIndex;
8439 if(lastOffset < freeSpace1stTo2ndEnd)
8442 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8443 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8447 lastOffset = freeSpace1stTo2ndEnd;
8451 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8453 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8454 while(lastOffset < size)
8457 while(nextAlloc2ndIndex != SIZE_MAX &&
8458 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8460 --nextAlloc2ndIndex;
8464 if(nextAlloc2ndIndex != SIZE_MAX)
8466 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8469 if(lastOffset < suballoc.offset)
8472 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8473 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8478 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8481 lastOffset = suballoc.offset + suballoc.size;
8482 --nextAlloc2ndIndex;
8487 if(lastOffset < size)
8490 const VkDeviceSize unusedRangeSize = size - lastOffset;
8491 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8500 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment and,
// on success, fills *pAllocationRequest (offset, free/lost bookkeeping).
// Three strategies, chosen by branch conditions partially dropped by
// extraction (the first branch presumably handles an upper-address request —
// TODO confirm against upstream):
//   1) allocate downward from the end of the block (double-stack top),
//   2) allocate upward at the end of the 1st vector,
//   3) allocate upward in the 2nd vector (ring buffer), optionally making
//      existing allocations "lost" to create room (canMakeOtherLost).
#endif // #if VMA_STATS_STRING_ENABLED 8504 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8505 uint32_t currentFrameIndex,
8506 uint32_t frameInUseCount,
8507 VkDeviceSize bufferImageGranularity,
8508 VkDeviceSize allocSize,
8509 VkDeviceSize allocAlignment,
8511 VmaSuballocationType allocType,
8512 bool canMakeOtherLost,
8514 VmaAllocationRequest* pAllocationRequest)
8516 VMA_ASSERT(allocSize > 0);
8517 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8518 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8519 VMA_HEAVY_ASSERT(Validate());
8521 const VkDeviceSize size = GetSize();
8522 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8523 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy 1: allocate from the top, growing downward ---
// Incompatible with ring-buffer usage of the 2nd vector.
8527 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8529 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8534 if(allocSize > size)
// Candidate offset: just below the lowest existing top-stack item (or the
// block end if the top stack is empty).
8538 VkDeviceSize resultBaseOffset = size - allocSize;
8539 if(!suballocations2nd.empty())
8541 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8542 resultBaseOffset = lastSuballoc.offset - allocSize;
8543 if(allocSize > lastSuballoc.offset)
8550 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin below the neighbor (downward direction).
8553 if(VMA_DEBUG_MARGIN > 0)
8555 if(resultOffset < VMA_DEBUG_MARGIN)
8559 resultOffset -= VMA_DEBUG_MARGIN;
// Align downward since we grow toward lower addresses.
8563 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against the next (higher-address) neighbors.
8567 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8569 bool bufferImageGranularityConflict =
false;
8570 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8572 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8573 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8575 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8577 bufferImageGranularityConflict =
true;
8585 if(bufferImageGranularityConflict)
8587 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Fits only if it doesn't collide with the end of the bottom (1st) stack.
8592 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8593 suballocations1st.back().offset + suballocations1st.back().size :
8595 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity against the previous (lower-address) neighbors.
8599 if(bufferImageGranularity > 1)
8601 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8603 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8604 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8606 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: no other allocations need to be lost for this strategy.
8620 pAllocationRequest->offset = resultOffset;
8621 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8622 pAllocationRequest->sumItemSize = 0;
8624 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 2: append at the end of the 1st vector, growing upward ---
8630 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8634 VkDeviceSize resultBaseOffset = 0;
8635 if(!suballocations1st.empty())
8637 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8638 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8642 VkDeviceSize resultOffset = resultBaseOffset;
8645 if(VMA_DEBUG_MARGIN > 0)
8647 resultOffset += VMA_DEBUG_MARGIN;
8651 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity check against the previous (lower-address) neighbors.
8655 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8657 bool bufferImageGranularityConflict =
false;
8658 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8660 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8661 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8663 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8665 bufferImageGranularityConflict =
true;
8673 if(bufferImageGranularityConflict)
8675 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the top stack, or at the block end.
8679 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8680 suballocations2nd.back().offset : size;
8683 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity check against the next (top-stack) neighbors.
8687 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8689 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8691 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8692 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8694 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8708 pAllocationRequest->offset = resultOffset;
8709 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8710 pAllocationRequest->sumItemSize = 0;
8712 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 3: wrap around — allocate in the 2nd vector (ring buffer) ---
8719 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Wrapping only makes sense when the 1st vector is non-empty.
8721 VMA_ASSERT(!suballocations1st.empty());
8723 VkDeviceSize resultBaseOffset = 0;
8724 if(!suballocations2nd.empty())
8726 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8727 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8731 VkDeviceSize resultOffset = resultBaseOffset;
8734 if(VMA_DEBUG_MARGIN > 0)
8736 resultOffset += VMA_DEBUG_MARGIN;
8740 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8744 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8746 bool bufferImageGranularityConflict =
false;
8747 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8749 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8750 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8752 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8754 bufferImageGranularityConflict =
true;
8762 if(bufferImageGranularityConflict)
8764 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8768 pAllocationRequest->itemsToMakeLostCount = 0;
8769 pAllocationRequest->sumItemSize = 0;
8770 size_t index1st = m_1stNullItemsBeginCount;
// Optionally make 1st-vector allocations that overlap the candidate range
// "lost" (only those eligible: CanBecomeLost and old enough by frame count).
8772 if(canMakeOtherLost)
8774 while(index1st < suballocations1st.size() &&
8775 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8778 const VmaSuballocation& suballoc = suballocations1st[index1st];
8779 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8785 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8786 if(suballoc.hAllocation->CanBecomeLost() &&
8787 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8789 ++pAllocationRequest->itemsToMakeLostCount;
8790 pAllocationRequest->sumItemSize += suballoc.size;
// Items on the same granularity page just past the range may also need
// to be lost to satisfy bufferImageGranularity.
8802 if(bufferImageGranularity > 1)
8804 while(index1st < suballocations1st.size())
8806 const VmaSuballocation& suballoc = suballocations1st[index1st];
8807 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8809 if(suballoc.hAllocation != VK_NULL_HANDLE)
8812 if(suballoc.hAllocation->CanBecomeLost() &&
8813 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8815 ++pAllocationRequest->itemsToMakeLostCount;
8816 pAllocationRequest->sumItemSize += suballoc.size;
// The wrapped allocation must end before the remaining 1st-vector items
// (or before the block end if everything up to there was consumed).
8835 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8836 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
8840 if(bufferImageGranularity > 1)
8842 for(
size_t nextSuballocIndex = index1st;
8843 nextSuballocIndex < suballocations1st.size();
8844 nextSuballocIndex++)
8846 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8847 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8849 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8863 pAllocationRequest->offset = resultOffset;
// Free size available = space up to the next surviving 1st item (or block
// end), minus the bytes occupied by items slated to become lost.
8864 pAllocationRequest->sumFreeSize =
8865 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8867 - pAllocationRequest->sumItemSize;
// Executes the "make lost" plan recorded in *pAllocationRequest: walks the
// 1st vector from the first null item and turns eligible allocations into
// free null items until itemsToMakeLostCount of them have been lost.
// Returns early (trivially) when nothing was requested to be lost.
8877 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8878 uint32_t currentFrameIndex,
8879 uint32_t frameInUseCount,
8880 VmaAllocationRequest* pAllocationRequest)
8882 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing items is only meaningful for the ring-buffer path (see
// CreateAllocationRequest strategy 3), never for a double stack.
8887 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8889 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8890 size_t index1st = m_1stNullItemsBeginCount;
8891 size_t madeLostCount = 0;
8892 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8894 VMA_ASSERT(index1st < suballocations1st.size());
8895 VmaSuballocation& suballoc = suballocations1st[index1st];
8896 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8898 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8899 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8900 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the item into a free null item and update the counters.
8902 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8903 suballoc.hAllocation = VK_NULL_HANDLE;
8904 m_SumFreeSize += suballoc.size;
8905 ++m_1stNullItemsMiddleCount;
// Makes every eligible allocation in both vectors lost (those whose
// MakeLost() succeeds for the given frame window) and returns how many were
// lost. Each lost item becomes a free null item and the counters are updated.
8922 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8924 uint32_t lostAllocationCount = 0;
// 1st vector: only the live region past the leading null items matters.
8926 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8927 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8929 VmaSuballocation& suballoc = suballocations1st[i];
8930 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8931 suballoc.hAllocation->CanBecomeLost() &&
8932 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8934 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8935 suballoc.hAllocation = VK_NULL_HANDLE;
8936 ++m_1stNullItemsMiddleCount;
8937 m_SumFreeSize += suballoc.size;
8938 ++lostAllocationCount;
// 2nd vector: same treatment, tracked by m_2ndNullItemsCount.
// NOTE(review): unlike the 1st-vector branch, no m_SumFreeSize update is
// visible here — possibly dropped by extraction; verify against upstream.
8942 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8943 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8945 VmaSuballocation& suballoc = suballocations2nd[i];
8946 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8947 suballoc.hAllocation->CanBecomeLost() &&
8948 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8950 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8951 suballoc.hAllocation = VK_NULL_HANDLE;
8952 ++m_2ndNullItemsCount;
8953 ++lostAllocationCount;
// If anything was lost, trimming/compaction is presumably triggered here
// (line not visible in this extract).
8957 if(lostAllocationCount)
8962 return lostAllocationCount;
// Scans both vectors and verifies the magic-value guard bytes written
// immediately before (at offset - VMA_DEBUG_MARGIN) and immediately after
// each live allocation inside the mapped block data. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard.
8965 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8967 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8968 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8970 const VmaSuballocation& suballoc = suballocations1st[i];
8971 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard before the allocation.
8973 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8975 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8976 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard after the allocation.
8978 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8980 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8981 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector (scanned from index 0).
8986 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8987 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8989 const VmaSuballocation& suballoc = suballocations2nd[i];
8990 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8992 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8994 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8995 return VK_ERROR_VALIDATION_FAILED_EXT;
8997 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8999 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9000 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: records the new
// suballocation in the appropriate vector and updates m_SumFreeSize.
// NOTE(review): the condition selecting the upper-address path (before line
// 9019) appears to have been dropped by extraction — presumably an
// upper-address flag; confirm against upstream.
9008 void VmaBlockMetadata_Linear::Alloc(
9009 const VmaAllocationRequest& request,
9010 VmaSuballocationType type,
9011 VkDeviceSize allocSize,
9015 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address path: push onto the 2nd vector as a descending stack.
9019 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9020 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9021 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9022 suballocations2nd.push_back(newSuballoc);
9023 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9027 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Lower-address path: three sub-cases depending on where the offset lands.
9030 if(suballocations1st.empty())
9032 suballocations1st.push_back(newSuballoc);
// Case 1: offset is past the end of the 1st vector — plain append.
9037 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9040 VMA_ASSERT(request.offset + allocSize <= GetSize());
9041 suballocations1st.push_back(newSuballoc);
// Case 2: offset is below the 1st vector's first live item — the allocation
// wrapped around, so it goes into the 2nd vector as a ring buffer.
9044 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9046 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9048 switch(m_2ndVectorMode)
9050 case SECOND_VECTOR_EMPTY:
// First wrapped allocation switches the mode to ring buffer.
9052 VMA_ASSERT(suballocations2nd.empty());
9053 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9055 case SECOND_VECTOR_RING_BUFFER:
9057 VMA_ASSERT(!suballocations2nd.empty());
// A double stack cannot also serve as a ring buffer.
9059 case SECOND_VECTOR_DOUBLE_STACK:
9060 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9066 suballocations2nd.push_back(newSuballoc);
// Case 3: offset fits neither pattern — internal logic error.
9070 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9075 m_SumFreeSize -= newSuballoc.size;
9078 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9080 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths handle the common
// cases (first live item of the 1st vector, last item of either vector);
// otherwise a sorted binary search locates the item in the middle of a
// vector and it is turned into a null item.
9083 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9085 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9086 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9088 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector just extends the
// leading null region.
9091 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9092 if(firstSuballoc.offset == offset)
9094 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9095 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9096 m_SumFreeSize += firstSuballoc.size;
9097 ++m_1stNullItemsBeginCount;
// Fast path: the last item of the 2nd vector can simply be popped.
9104 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9105 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9107 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9108 if(lastSuballoc.offset == offset)
9110 m_SumFreeSize += lastSuballoc.size;
9111 suballocations2nd.pop_back();
// Fast path: likewise the last item of the 1st vector when there is no 2nd.
9117 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9119 VmaSuballocation& lastSuballoc = suballocations1st.back();
9120 if(lastSuballoc.offset == offset)
9122 m_SumFreeSize += lastSuballoc.size;
9123 suballocations1st.pop_back();
// Slow path: binary-search the live region of the 1st vector (sorted by
// ascending offset) and null the item in place.
9131 VmaSuballocation refSuballoc;
9132 refSuballoc.offset = offset;
9134 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9135 suballocations1st.begin() + m_1stNullItemsBeginCount,
9136 suballocations1st.end(),
9138 if(it != suballocations1st.end())
9140 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9141 it->hAllocation = VK_NULL_HANDLE;
9142 ++m_1stNullItemsMiddleCount;
9143 m_SumFreeSize += it->size;
// Slow path for the 2nd vector: sort order depends on mode (ring buffer is
// ascending by offset, double stack is descending).
9149 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9152 VmaSuballocation refSuballoc;
9153 refSuballoc.offset = offset;
9155 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9156 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9157 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9158 if(it != suballocations2nd.end())
9160 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9161 it->hAllocation = VK_NULL_HANDLE;
9162 ++m_2ndNullItemsCount;
9163 m_SumFreeSize += it->size;
// No item matched the offset anywhere — caller passed a bad offset.
9169 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Returns true when the 1st suballocation vector has accumulated enough
// null (freed) items, relative to live ones, to be worth compacting.
// NOTE(review): original file line numbers are fused into this extraction and
// some lines (braces etc.) appear missing; code is left byte-identical.
9172 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9174 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9175 const size_t suballocCount = AccessSuballocations1st().size();
// Compact once there are >32 items and nulls are >= 1.5x the live count.
9176 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after a free in the linear (ring-buffer / double-stack)
// metadata: trims null items from both suballocation vectors, optionally
// compacts the 1st vector, and may swap the roles of the two vectors.
// NOTE(review): extraction dropped some lines (braces, an early-exit branch);
// code left byte-identical.
9179 void VmaBlockMetadata_Linear::CleanupAfterFree()
9181 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9182 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Presumably an IsEmpty()-style branch resets everything — TODO confirm
// against the full source; the condition line is missing here.
9186 suballocations1st.clear();
9187 suballocations2nd.clear();
9188 m_1stNullItemsBeginCount = 0;
9189 m_1stNullItemsMiddleCount = 0;
9190 m_2ndNullItemsCount = 0;
9191 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9195 const size_t suballoc1stCount = suballocations1st.size();
9196 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9197 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading-null run: absorb null middle items adjacent to the front.
9200 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9201 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9203 ++m_1stNullItemsBeginCount;
9204 --m_1stNullItemsMiddleCount;
// Pop null items off the tail of the 1st vector.
9208 while(m_1stNullItemsMiddleCount > 0 &&
9209 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9211 --m_1stNullItemsMiddleCount;
9212 suballocations1st.pop_back();
// Pop null items off the tail of the 2nd vector.
9216 while(m_2ndNullItemsCount > 0 &&
9217 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9219 --m_2ndNullItemsCount;
9220 suballocations2nd.pop_back();
// Compaction: slide live items left over the null holes, then shrink.
9223 if(ShouldCompact1st())
9225 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9226 size_t srcIndex = m_1stNullItemsBeginCount;
9227 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9229 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9233 if(dstIndex != srcIndex)
9235 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9239 suballocations1st.resize(nonNullItemCount);
9240 m_1stNullItemsBeginCount = 0;
9241 m_1stNullItemsMiddleCount = 0;
9245 if(suballocations2nd.empty())
9247 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// If the 1st vector became effectively empty, promote the 2nd vector
// (ring-buffer mode) to become the new 1st vector by flipping the index.
9251 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9253 suballocations1st.clear();
9254 m_1stNullItemsBeginCount = 0;
9256 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9259 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9260 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9261 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9262 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9264 ++m_1stNullItemsBeginCount;
9265 --m_1stNullItemsMiddleCount;
9267 m_2ndNullItemsCount = 0;
// Swap which internal vector is treated as "1st".
9268 m_1stVectorIndex ^= 1;
9273 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata: constructor zeroes the per-level free lists,
// destructor (body not visible here), and Init() creates the root node
// covering the whole block and puts it on the level-0 free list.
9280 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
// NOTE(review): member-initializer list lines appear missing from this
// extraction; only the m_FreeList memset of the ctor body is visible.
9283 memset(m_FreeList, 0,
sizeof(m_FreeList));
9286 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
9291 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9293 VmaBlockMetadata::Init(size);
// Root node spans the entire block; it has no parent and no buddy.
9295 Node* rootNode =
new Node();
9296 rootNode->offset = 0;
9297 rootNode->type = Node::TYPE_FREE;
9298 rootNode->parent = VMA_NULL;
9299 rootNode->buddy = VMA_NULL;
9302 AddToFreeListFront(0, rootNode);
// Validates the buddy tree (recursively via ValidateNode) and then checks
// every per-level free list: front node has no prev, list nodes are FREE,
// back pointer matches the last node, and next/prev links are consistent.
9305 bool VmaBlockMetadata_Buddy::Validate()
const 9308 if(!ValidateNode(VMA_NULL, m_Root, 0, GetSize()))
9314 for(uint32_t level = 0; level < MAX_LEVELS; ++level)
9316 if(m_FreeList[level].front != VMA_NULL &&
9317 m_FreeList[level].front->free.prev != VMA_NULL)
9322 for(Node* node = m_FreeList[level].front;
9324 node = node->free.next)
9326 if(node->type != Node::TYPE_FREE)
9331 if(node->free.next == VMA_NULL)
9333 if(m_FreeList[level].back != node)
9340 if(node->free.next->free.prev != node)
// Simple statistics accessors plus the JSON detailed-map printer.
// NOTE(review): the one-line bodies of the getters were dropped by the
// extraction; only the signatures remain. Code left byte-identical.
9351 size_t VmaBlockMetadata_Buddy::GetAllocationCount()
const 9356 VkDeviceSize VmaBlockMetadata_Buddy::GetSumFreeSize()
// NOTE(review): the doubled qualification "VmaBlockMetadata_Buddy::
// VmaBlockMetadata_Buddy::" below exists in the upstream source too.
const 9361 VkDeviceSize VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9366 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
// Walks the whole tree accumulating per-node statistics.
const 9377 CalcAllocationStatInfoNode(outInfo, m_Root, GetSize());
9380 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
// Emits this block's metadata as JSON: header from aggregated stats, then
// a recursive dump of every node.
const 9385 #if VMA_STATS_STRING_ENABLED 9387 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9391 CalcAllocationStatInfo(stat);
9393 PrintDetailedMap_Begin(
9399 PrintDetailedMapNode(json, m_Root, GetSize());
9401 PrintDetailedMap_End(json);
9404 #endif // #if VMA_STATS_STRING_ENABLED 9406 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
// Finds a free node big enough for the request. Computes the target level
// from allocSize, then scans levels from target up toward the root; the
// first non-empty free list wins. The chosen level is smuggled to Alloc()
// through customData.
9407 uint32_t currentFrameIndex,
9408 uint32_t frameInUseCount,
9409 VkDeviceSize bufferImageGranularity,
9410 VkDeviceSize allocSize,
9411 VkDeviceSize allocAlignment,
9413 VmaSuballocationType allocType,
9414 bool canMakeOtherLost,
9416 VmaAllocationRequest* pAllocationRequest)
// Upper-address allocation is a linear-algorithm-only feature.
9418 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
9420 const VkDeviceSize size = GetSize();
// Request larger than the whole block can never succeed.
9421 if(allocSize > size)
9426 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Iterate level = targetLevel, targetLevel-1, ..., 0 (post-decrement trick).
9427 for(uint32_t level = targetLevel + 1; level--; )
9429 if(m_FreeList[level].front != VMA_NULL)
9431 pAllocationRequest->offset = m_FreeList[level].front->offset;
9432 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9433 pAllocationRequest->sumItemSize = 0;
9434 pAllocationRequest->itemsToMakeLostCount = 0;
// Pass the found level to Alloc() via the opaque customData field.
9435 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support "lost" allocations or the
// corruption-margin check; these are stubs (bodies not visible in this
// extraction — presumably trivial returns; confirm against full source).
9443 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9444 uint32_t currentFrameIndex,
9445 uint32_t frameInUseCount,
9446 VmaAllocationRequest* pAllocationRequest)
9451 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9456 VkResult VmaBlockMetadata_Buddy::CheckCorruption(
const void* pBlockData)
// Commits an allocation request produced by CreateAllocationRequest():
// repeatedly splits the free node found at request.customData's level into
// buddy pairs until the target level is reached, then marks the final node
// as an allocation.
9461 void VmaBlockMetadata_Buddy::Alloc(
9462 const VmaAllocationRequest& request,
9463 VmaSuballocationType type,
9464 VkDeviceSize allocSize,
9468 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Level chosen during the request phase (stored as uintptr_t in customData).
9469 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
9470 VMA_ASSERT(m_FreeList[currLevel].front != VMA_NULL);
9471 Node* currNode = m_FreeList[currLevel].front;
9472 VMA_ASSERT(currNode->type == Node::TYPE_FREE);
9473 VMA_ASSERT(currNode->offset == request.offset);
// Split down: each iteration replaces currNode with two half-size children.
9476 while(currLevel < targetLevel)
9480 RemoveFromFreeList(currLevel, currNode);
9482 const uint32_t childrenLevel = currLevel + 1;
9485 Node* leftChild =
new Node();
9486 Node* rightChild =
new Node();
9488 leftChild->offset = currNode->offset;
9489 leftChild->type = Node::TYPE_FREE;
9490 leftChild->parent = currNode;
9491 leftChild->buddy = rightChild;
9493 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9494 rightChild->type = Node::TYPE_FREE;
9495 rightChild->parent = currNode;
9496 rightChild->buddy = leftChild;
9499 currNode->type = Node::TYPE_SPLIT;
9500 currNode->split.leftChild = leftChild;
// Push right first so left ends up at the list front — allocation then
// proceeds from the left child, keeping offsets low.
9503 AddToFreeListFront(childrenLevel, rightChild);
9504 AddToFreeListFront(childrenLevel, leftChild);
9507 currNode = m_FreeList[currLevel].front;
9511 VMA_ASSERT(currLevel == targetLevel && currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9512 RemoveFromFreeList(currLevel, currNode);
9515 currNode->type = Node::TYPE_ALLOCATION;
9516 currNode->allocation.alloc = hAllocation;
// Recursively destroys a subtree: for a SPLIT node, deletes the right child
// (reached via the left child's buddy pointer) then the left child, before
// freeing the node itself (the vma_delete line is outside this view).
9519 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9521 if(node->type == Node::TYPE_SPLIT)
9523 DeleteNode(node->split.leftChild->buddy);
9524 DeleteNode(node->split.leftChild);
// Recursively validates one node of the buddy tree: parent/buddy pointer
// invariants, and per-type checks (allocation handle set, split children
// correctly positioned at half the node size).
9530 bool VmaBlockMetadata_Buddy::ValidateNode(
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9532 if(curr->parent != parent)
// Only the root (parent == NULL) may lack a buddy.
9536 if((curr->buddy == VMA_NULL) != (parent == VMA_NULL))
9540 if(curr->buddy != VMA_NULL && curr->buddy->buddy != curr)
9546 case Node::TYPE_FREE:
9549 case Node::TYPE_ALLOCATION:
9550 if(curr->allocation.alloc == VK_NULL_HANDLE)
9555 case Node::TYPE_SPLIT:
9557 const uint32_t childrenLevel = level + 1;
9558 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9559 const Node*
const leftChild = curr->split.leftChild;
9560 if(leftChild == VMA_NULL)
// Left child starts at the parent's offset.
9564 if(leftChild->offset != curr->offset)
9568 if(!ValidateNode(curr, leftChild, childrenLevel, childrenLevelNodeSize))
9572 const Node*
const rightChild = leftChild->buddy;
// NOTE(review): comparison looks like it should be against
// curr->offset + childrenLevelNodeSize in the full source — the visible
// text says levelNodeSize; cannot verify from this fragment.
9573 if(rightChild->offset != curr->offset + levelNodeSize)
9577 if(!ValidateNode(curr, rightChild, childrenLevel, childrenLevelNodeSize))
// AllocSizeToLevel: deepest level whose node size still fits allocSize
// (level 0 = whole block; each level halves the node size, capped at
// MAX_LEVELS). LevelToNodeSize: inverse mapping, GetSize() >> level.
9590 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9594 VkDeviceSize currLevelNodeSize = GetSize();
9595 VkDeviceSize nextLevelNodeSize = currLevelNodeSize / 2;
9596 while(allocSize <= nextLevelNodeSize && level + 1 < MAX_LEVELS)
9599 currLevelNodeSize = nextLevelNodeSize;
9600 nextLevelNodeSize = currLevelNodeSize / 2;
9605 VkDeviceSize VmaBlockMetadata_Buddy::LevelToNodeSize(uint32_t level)
const 9608 VkDeviceSize result = GetSize();
// Halve once per level (the "result /= 2" body line is outside this view).
9609 for(uint32_t i = 0; i < level; ++i)
// Frees the allocation at the given offset: descends from the root choosing
// left/right child by offset, marks the leaf FREE, then merges buddy pairs
// upward while both halves are free, finally re-inserting the surviving
// node into the free list.
9616 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9619 Node* node = m_Root;
9620 VkDeviceSize nodeOffset = 0;
9622 VkDeviceSize levelSize = GetSize();
9623 while(node->type == Node::TYPE_SPLIT)
9625 const VkDeviceSize nextLevelSize = levelSize / 2;
9626 if(offset < nodeOffset + nextLevelSize)
9628 node = node->split.leftChild;
// Right child = left child's buddy; advance the running offset.
9632 node = node->split.leftChild->buddy;
9633 nodeOffset += nextLevelSize;
9636 levelSize = nextLevelSize;
9639 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// alloc may be VK_NULL_HANDLE when freeing purely by offset.
9640 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9642 node->type = Node::TYPE_FREE;
// Merge upward: if our buddy is also free, collapse into the parent.
9645 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9647 RemoveFromFreeList(level, node->buddy);
9648 Node*
const parent = node->parent;
9652 parent->type = Node::TYPE_FREE;
9658 AddToFreeListFront(level, node);
// Recursive per-node statistics accumulation: FREE and ALLOCATION leaves
// contribute to outInfo (accumulation lines not visible here); SPLIT nodes
// recurse into both half-size children.
9661 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9665 case Node::TYPE_FREE:
9671 case Node::TYPE_ALLOCATION:
9677 case Node::TYPE_SPLIT:
9679 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9680 const Node*
const leftChild = node->split.leftChild;
9681 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9682 const Node*
const rightChild = leftChild->buddy;
9683 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a FREE node onto the front of the doubly-linked free list for the
// given level, updating front/back pointers for the empty and non-empty
// cases.
9691 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9693 VMA_ASSERT(node->type == Node::TYPE_FREE);
9696 Node*
const frontNode = m_FreeList[level].front;
// Empty list: node becomes both front and back.
9697 if(frontNode == VMA_NULL)
9699 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9700 node->free.prev = node->free.next = VMA_NULL;
9701 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty list: link node before the current front.
9705 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9706 node->free.prev = VMA_NULL;
9707 node->free.next = frontNode;
9708 frontNode->free.prev = node;
9709 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// handling the four front/middle/back combinations via prev/next checks.
9713 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9715 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// No predecessor => node is the list front.
9718 if(node->free.prev == VMA_NULL)
9720 VMA_ASSERT(m_FreeList[level].front == node);
9721 m_FreeList[level].front = node->free.next;
9725 Node*
const prevFreeNode = node->free.prev;
9726 VMA_ASSERT(prevFreeNode->free.next == node);
9727 prevFreeNode->free.next = node->free.next;
// No successor => node is the list back.
9731 if(node->free.next == VMA_NULL)
9733 VMA_ASSERT(m_FreeList[level].back == node);
9734 m_FreeList[level].back = node->free.prev;
9738 Node*
const nextFreeNode = node->free.next;
9739 VMA_ASSERT(nextFreeNode->free.prev == node);
9740 nextFreeNode->free.prev = node->free.prev;
// Recursive JSON dump of one buddy-tree node: unused range for FREE,
// allocation entry for ALLOCATION, recursion into both children for SPLIT.
9744 #if VMA_STATS_STRING_ENABLED 9745 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9749 case Node::TYPE_FREE:
9750 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9752 case Node::TYPE_ALLOCATION:
9753 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9755 case Node::TYPE_SPLIT:
9757 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9758 const Node*
const leftChild = node->split.leftChild;
9759 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9760 const Node*
const rightChild = leftChild->buddy;
9761 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// VmaDeviceMemoryBlock wraps one VkDeviceMemory plus its metadata object.
// The constructor leaves the block uninitialized; Init() stores the memory
// handle and instantiates the metadata implementation matching the pool's
// algorithm (linear, buddy, or the default generic one).
9768 #endif // #if VMA_STATS_STRING_ENABLED 9774 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9775 m_pMetadata(VMA_NULL),
9776 m_MemoryTypeIndex(UINT32_MAX),
9778 m_hMemory(VK_NULL_HANDLE),
9780 m_pMappedData(VMA_NULL)
9784 void VmaDeviceMemoryBlock::Init(
9786 uint32_t newMemoryTypeIndex,
9787 VkDeviceMemory newMemory,
9788 VkDeviceSize newSize,
// Init must not be called twice on the same block.
9792 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9794 m_MemoryTypeIndex = newMemoryTypeIndex;
9796 m_hMemory = newMemory;
// Algorithm dispatch (switch lines missing from this extraction):
// linear / buddy / generic metadata, each constructed with the allocator's
// allocation callbacks via vma_new.
9801 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9804 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
9810 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9812 m_pMetadata->Init(newSize);
// Destroy: asserts all suballocations were freed, returns the VkDeviceMemory
// to the allocator, and deletes the metadata object.
9815 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9819 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9821 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9822 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9823 m_hMemory = VK_NULL_HANDLE;
9825 vma_delete(allocator, m_pMetadata);
9826 m_pMetadata = VMA_NULL;
// Validate: block must hold a live memory handle and a non-empty metadata
// size; defers the rest to the metadata implementation.
9829 bool VmaDeviceMemoryBlock::Validate()
const 9831 if((m_hMemory == VK_NULL_HANDLE) ||
9832 (m_pMetadata->GetSize() == 0))
9837 return m_pMetadata->Validate();
// Temporarily maps the block and asks the metadata to verify its corruption
// -detection magic values; unmaps afterward. Returns the Map() error if
// mapping fails.
9840 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9842 void* pData =
nullptr;
9843 VkResult res = Map(hAllocator, 1, &pData);
9844 if(res != VK_SUCCESS)
9849 res = m_pMetadata->CheckCorruption(pData);
9851 Unmap(hAllocator, 1);
// Reference-counted persistent mapping. Map(): under the block mutex, if the
// block is already mapped just bump m_MapCount and hand out the cached
// pointer; otherwise call vkMapMemory. Unmap(): decrement the count and call
// vkUnmapMemory when it reaches zero; asserts on unbalanced unmap.
9856 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9863 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse cached pointer.
9866 m_MapCount += count;
9867 VMA_ASSERT(m_pMappedData != VMA_NULL);
9868 if(ppData != VMA_NULL)
9870 *ppData = m_pMappedData;
// First mapping: go through the dispatched vkMapMemory function pointer.
9876 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9877 hAllocator->m_hDevice,
9883 if(result == VK_SUCCESS)
9885 if(ppData != VMA_NULL)
9887 *ppData = m_pMappedData;
9895 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9902 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9903 if(m_MapCount >= count)
9905 m_MapCount -= count;
// Count reached zero (condition line not visible): drop the mapping.
9908 m_pMappedData = VMA_NULL;
9909 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
9914 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection helpers: write (resp. validate) the magic values in
// the VMA_DEBUG_MARGIN bytes immediately before and after an allocation.
// Both map the block for the duration of the access and unmap afterwards.
9918 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
// Only valid when margins + corruption detection are compiled in.
9920 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9921 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9924 VkResult res = Map(hAllocator, 1, &pData);
9925 if(res != VK_SUCCESS)
9930 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
9931 VmaWriteMagicValue(pData, allocOffset + allocSize);
9933 Unmap(hAllocator, 1);
9938 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9940 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9941 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9944 VkResult res = Map(hAllocator, 1, &pData);
9945 if(res != VK_SUCCESS)
// A clobbered magic value means user code wrote outside its allocation.
9950 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
9952 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
9954 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
9956 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
9959 Unmap(hAllocator, 1);
// Bind a buffer (resp. image) to this block's VkDeviceMemory at the
// allocation's offset. The block mutex serializes binds because multiple
// threads may bind resources into the same VkDeviceMemory concurrently.
9964 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
9969 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9970 hAllocation->GetBlock() ==
this);
9972 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9973 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
9974 hAllocator->m_hDevice,
9977 hAllocation->GetOffset());
9980 VkResult VmaDeviceMemoryBlock::BindImageMemory(
9985 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9986 hAllocation->GetBlock() ==
this);
9988 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9989 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
9990 hAllocator->m_hDevice,
9993 hAllocation->GetOffset());
// NOTE(review): the enclosing function header for this memset (presumably a
// VmaInitStatInfo-style helper) is missing from this extraction.
9998 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of accumulated VmaStatInfo (e.g. averages); body lines
// not visible here.
10017 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T: a custom pool is essentially a configured VmaBlockVector.
// The initializer forwards the create-info fields; blockSize==0 means
// "use the allocator's preferred block size".
10025 VmaPool_T::VmaPool_T(
10028 VkDeviceSize preferredBlockSize) :
10031 createInfo.memoryTypeIndex,
10032 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10033 createInfo.minBlockCount,
10034 createInfo.maxBlockCount,
10036 createInfo.frameInUseCount,
// explicitBlockSize flag: true iff the user pinned the block size.
10038 createInfo.blockSize != 0,
10044 VmaPool_T::~VmaPool_T()
10048 #if VMA_STATS_STRING_ENABLED 10050 #endif // #if VMA_STATS_STRING_ENABLED 10052 VmaBlockVector::VmaBlockVector(
// VmaBlockVector: the sequence of VkDeviceMemory blocks for one memory type
// (default pools) or one custom pool. Pure member-wise initialization; the
// m_Blocks vector uses the allocator's allocation callbacks.
10054 uint32_t memoryTypeIndex,
10055 VkDeviceSize preferredBlockSize,
10056 size_t minBlockCount,
10057 size_t maxBlockCount,
10058 VkDeviceSize bufferImageGranularity,
10059 uint32_t frameInUseCount,
10061 bool explicitBlockSize,
10062 uint32_t algorithm) :
10063 m_hAllocator(hAllocator),
10064 m_MemoryTypeIndex(memoryTypeIndex),
10065 m_PreferredBlockSize(preferredBlockSize),
10066 m_MinBlockCount(minBlockCount),
10067 m_MaxBlockCount(maxBlockCount),
10068 m_BufferImageGranularity(bufferImageGranularity),
10069 m_FrameInUseCount(frameInUseCount),
10070 m_IsCustomPool(isCustomPool),
10071 m_ExplicitBlockSize(explicitBlockSize),
10072 m_Algorithm(algorithm),
10073 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10074 m_HasEmptyBlock(false),
10075 m_pDefragmentator(VMA_NULL),
// Destructor: the defragmentator must already be destroyed; every remaining
// block is destroyed (returns its VkDeviceMemory) and deleted.
10080 VmaBlockVector::~VmaBlockVector()
10082 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10084 for(
size_t i = m_Blocks.size(); i--; )
10086 m_Blocks[i]->Destroy(m_hAllocator);
10087 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks at the preferred size; stops and
// returns the first failure.
10091 VkResult VmaBlockVector::CreateMinBlocks()
10093 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10095 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10096 if(res != VK_SUCCESS)
// Aggregates pool statistics across all blocks under the vector mutex.
10104 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10106 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10108 const size_t blockCount = m_Blocks.size();
10117 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10119 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10120 VMA_ASSERT(pBlock);
10121 VMA_HEAVY_ASSERT(pBlock->Validate());
10122 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection requires compile-time opt-in (margin + detect flags)
// plus HOST_VISIBLE|HOST_COHERENT memory so the margins can be read/written.
10126 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10128 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10129 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10130 (VMA_DEBUG_MARGIN > 0) &&
10131 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries of the "make other allocations lost" strategy.
10134 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main suballocation entry point for a block vector. Strategy, in order:
// 1) try the last block, then all existing blocks (forward or backward
//    scan), 2) create a new block (shrinking its size up to 3 times on
//    failure unless the block size is explicit), 3) optionally evict
//    ("make lost") other allocations, retrying up to
//    VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): many argument lines and branch lines are missing from this
// extraction; code left byte-identical.
10136 VkResult VmaBlockVector::Allocate(
10138 uint32_t currentFrameIndex,
10140 VkDeviceSize alignment,
10142 VmaSuballocationType suballocType,
10149 const bool canCreateNewBlock =
10151 (m_Blocks.size() < m_MaxBlockCount);
10158 canMakeOtherLost =
false;
// Upper-address requests are only meaningful for the linear algorithm.
10162 if(isUpperAddress &&
10165 return VK_ERROR_FEATURE_NOT_PRESENT;
10179 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (plus two debug margins) larger than a whole block can never fit.
10183 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10185 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10188 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10195 if(!canMakeOtherLost || canCreateNewBlock)
// Fast path: newest block first.
10204 if(!m_Blocks.empty())
10206 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10207 VMA_ASSERT(pCurrBlock);
10208 VkResult res = AllocateFromBlock(
10219 if(res == VK_SUCCESS)
10221 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// Forward scan over existing blocks.
10231 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10233 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10234 VMA_ASSERT(pCurrBlock);
10235 VkResult res = AllocateFromBlock(
10246 if(res == VK_SUCCESS)
10248 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Backward scan variant (strategy-dependent; selector lines not visible).
10256 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10258 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10259 VMA_ASSERT(pCurrBlock);
10260 VkResult res = AllocateFromBlock(
10271 if(res == VK_SUCCESS)
10273 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// No existing block could serve the request: create a new block.
10281 if(canCreateNewBlock)
10284 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10285 uint32_t newBlockSizeShift = 0;
10286 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: start smaller than preferred when existing blocks are small
// and the request allows it (only when block size isn't user-pinned).
10288 if(!m_ExplicitBlockSize)
10291 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10292 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10294 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10295 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10297 newBlockSize = smallerNewBlockSize;
10298 ++newBlockSizeShift;
10307 size_t newBlockIndex = 0;
10308 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On out-of-memory, retry with progressively halved block sizes.
10310 if(!m_ExplicitBlockSize)
10312 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10314 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10315 if(smallerNewBlockSize >= size)
10317 newBlockSize = smallerNewBlockSize;
10318 ++newBlockSizeShift;
10319 res = CreateBlock(newBlockSize, &newBlockIndex);
10328 if(res == VK_SUCCESS)
10330 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10331 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10333 res = AllocateFromBlock(
10344 if(res == VK_SUCCESS)
10346 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation from a freshly created block should not fail; treat as OOM.
10352 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Eviction path: find the cheapest set of allocations to make "lost".
10359 if(canMakeOtherLost)
10361 uint32_t tryIndex = 0;
10362 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10364 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10365 VmaAllocationRequest bestRequest = {};
10366 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the block whose request has the lowest cost.
10372 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10374 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10375 VMA_ASSERT(pCurrBlock);
10376 VmaAllocationRequest currRequest = {};
10377 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10380 m_BufferImageGranularity,
10389 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10390 if(pBestRequestBlock == VMA_NULL ||
10391 currRequestCost < bestRequestCost)
10393 pBestRequestBlock = pCurrBlock;
10394 bestRequest = currRequest;
10395 bestRequestCost = currRequestCost;
// Zero cost cannot be beaten — stop searching early.
10397 if(bestRequestCost == 0)
// Backward scan variant of the same search.
10408 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10410 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10411 VMA_ASSERT(pCurrBlock);
10412 VmaAllocationRequest currRequest = {};
10413 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10416 m_BufferImageGranularity,
10425 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10426 if(pBestRequestBlock == VMA_NULL ||
10427 currRequestCost < bestRequestCost ||
10430 pBestRequestBlock = pCurrBlock;
10431 bestRequest = currRequest;
10432 bestRequestCost = currRequestCost;
10434 if(bestRequestCost == 0 ||
// Commit: make the victims lost, then allocate in their place.
10444 if(pBestRequestBlock != VMA_NULL)
// Persistent-map request: map the block up front.
10448 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10449 if(res != VK_SUCCESS)
10455 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10461 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10463 m_HasEmptyBlock =
false;
10466 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10467 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10468 (*pAllocation)->InitBlockAllocation(
10471 bestRequest.offset,
10477 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10478 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10479 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10480 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10482 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10484 if(IsCorruptionDetectionEnabled())
10486 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10487 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted: another thread kept stealing the space.
10502 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10504 return VK_ERROR_TOO_MANY_OBJECTS;
10508 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block allocation: validates corruption margins, drops a persistent
// mapping, frees the suballocation in metadata, and manages the single
// cached empty block (at most one empty block is retained; a second empty
// block is destroyed). Actual block destruction happens outside the mutex.
10511 void VmaBlockVector::Free(
10514 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope block (brace missing): everything below until the delete runs
// under the vector mutex.
10518 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10520 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10522 if(IsCorruptionDetectionEnabled())
10524 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10525 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
10528 if(hAllocation->IsPersistentMap())
10530 pBlock->Unmap(m_hAllocator, 1);
10533 pBlock->m_pMetadata->Free(hAllocation);
10534 VMA_HEAVY_ASSERT(pBlock->Validate());
10536 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: keep it only if it's the sole empty block and we're
// above the minimum block count.
10539 if(pBlock->m_pMetadata->IsEmpty())
10542 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10544 pBlockToDelete = pBlock;
10550 m_HasEmptyBlock =
true;
// There was already an empty block: if the last block is empty, delete it.
10555 else if(m_HasEmptyBlock)
10557 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10558 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10560 pBlockToDelete = pLastBlock;
10561 m_Blocks.pop_back();
10562 m_HasEmptyBlock =
false;
10566 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory is deliberately done after releasing
// the mutex — it may be a slow driver call.
10571 if(pBlockToDelete != VMA_NULL)
10573 VMA_DEBUG_LOG(
" Deleted empty allocation");
10574 pBlockToDelete->Destroy(m_hAllocator);
10575 vma_delete(m_hAllocator, pBlockToDelete);
// Largest existing block size, scanning newest-first with early exit once
// the preferred size is reached (no block can usefully exceed it).
10579 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10581 VkDeviceSize result = 0;
10582 for(
size_t i = m_Blocks.size(); i--; )
10584 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10585 if(result >= m_PreferredBlockSize)
// Removes a specific block pointer from m_Blocks (linear search).
10593 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10595 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10597 if(m_Blocks[blockIndex] == pBlock)
10599 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass: keeps m_Blocks approximately ordered by ascending
// free space, cheaply, on every free.
10606 void VmaBlockVector::IncrementallySortBlocks()
10611 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10613 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10615 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts a suballocation in one specific block: builds an allocation
// request (no eviction allowed — itemsToMakeLostCount must be 0), maps the
// block when a persistent mapping was asked for, commits via metadata
// Alloc(), then fills debug pattern / corruption margins as configured.
// Returns OOM when the block cannot satisfy the request.
10622 VkResult VmaBlockVector::AllocateFromBlock(
10623 VmaDeviceMemoryBlock* pBlock,
10625 uint32_t currentFrameIndex,
10627 VkDeviceSize alignment,
10630 VmaSuballocationType suballocType,
10639 VmaAllocationRequest currRequest = {};
10640 if(pBlock->m_pMetadata->CreateAllocationRequest(
10643 m_BufferImageGranularity,
// canMakeOtherLost is false on this path, so no evictions are requested.
10653 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
10657 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10658 if(res != VK_SUCCESS)
// We're about to occupy the empty block, so clear the cached flag.
10665 if(pBlock->m_pMetadata->IsEmpty())
10667 m_HasEmptyBlock =
false;
10670 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10671 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10672 (*pAllocation)->InitBlockAllocation(
10675 currRequest.offset,
10681 VMA_HEAVY_ASSERT(pBlock->Validate());
10682 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10683 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10685 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10687 if(IsCorruptionDetectionEnabled())
10689 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10690 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10694 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates one new VkDeviceMemory of blockSize for this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports its index.
10697 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10699 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10700 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10701 allocInfo.allocationSize = blockSize;
10702 VkDeviceMemory mem = VK_NULL_HANDLE;
10703 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// (Error-return branch for res < 0 not visible in this extraction.)
10712 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10717 allocInfo.allocationSize,
10721 m_Blocks.push_back(pBlock);
10722 if(pNewBlockIndex != VMA_NULL)
10724 *pNewBlockIndex = m_Blocks.size() - 1;
10730 #if VMA_STATS_STRING_ENABLED 10732 void VmaBlockVector::PrintDetailedMap(
// Writes this block vector as a JSON object. Custom pools emit their
// configuration (memory type, block size, count limits, frame-in-use,
// algorithm); default pools emit only the preferred block size. Then all
// blocks are dumped keyed by their numeric id.
class VmaJsonWriter& json)
10734 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10736 json.BeginObject();
// Custom-pool branch (the if(m_IsCustomPool) line is outside this view).
10740 json.WriteString(
"MemoryTypeIndex");
10741 json.WriteNumber(m_MemoryTypeIndex);
10743 json.WriteString(
"BlockSize");
10744 json.WriteNumber(m_PreferredBlockSize);
10746 json.WriteString(
"BlockCount");
10747 json.BeginObject(
true);
10748 if(m_MinBlockCount > 0)
10750 json.WriteString(
"Min");
10751 json.WriteNumber((uint64_t)m_MinBlockCount);
10753 if(m_MaxBlockCount < SIZE_MAX)
10755 json.WriteString(
"Max");
10756 json.WriteNumber((uint64_t)m_MaxBlockCount);
10758 json.WriteString(
"Cur");
10759 json.WriteNumber((uint64_t)m_Blocks.size());
10762 if(m_FrameInUseCount > 0)
10764 json.WriteString(
"FrameInUseCount");
10765 json.WriteNumber(m_FrameInUseCount);
10768 if(m_Algorithm != 0)
10770 json.WriteString(
"Algorithm");
10771 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch: only the preferred block size.
10776 json.WriteString(
"PreferredBlockSize");
10777 json.WriteNumber(m_PreferredBlockSize);
10780 json.WriteString(
"Blocks");
10781 json.BeginObject();
10782 for(
size_t i = 0; i < m_Blocks.size(); ++i)
10784 json.BeginString();
10785 json.ContinueString(m_Blocks[i]->GetId());
10788 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
10795 #endif // #if VMA_STATS_STRING_ENABLED 10797 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
// Lazily creates the per-vector defragmentator on first use and returns it.
10799 uint32_t currentFrameIndex)
10801 if(m_pDefragmentator == VMA_NULL)
10803 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
10806 currentFrameIndex);
10809 return m_pDefragmentator;
// Runs the defragmentator within the given move budgets, accumulates the
// statistics, then destroys every block that became empty (above the
// minimum block count), counting its size as freed bytes.
10812 VkResult VmaBlockVector::Defragment(
10814 VkDeviceSize& maxBytesToMove,
10815 uint32_t& maxAllocationsToMove)
// No-op when no defragmentator was ever created for this vector.
10817 if(m_pDefragmentator == VMA_NULL)
10822 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10825 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
10828 if(pDefragmentationStats != VMA_NULL)
10830 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
10831 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
10832 pDefragmentationStats->
bytesMoved += bytesMoved;
// Consume the caller's remaining budget.
10834 VMA_ASSERT(bytesMoved <= maxBytesToMove);
10835 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
10836 maxBytesToMove -= bytesMoved;
10837 maxAllocationsToMove -= allocationsMoved;
// Recompute the empty-block cache while destroying freed-up blocks.
10841 m_HasEmptyBlock =
false;
10842 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10844 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
10845 if(pBlock->m_pMetadata->IsEmpty())
10847 if(m_Blocks.size() > m_MinBlockCount)
10849 if(pDefragmentationStats != VMA_NULL)
10852 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
10855 VmaVectorRemove(m_Blocks, blockIndex);
10856 pBlock->Destroy(m_hAllocator);
10857 vma_delete(m_hAllocator, pBlock);
// Empty block retained (at minimum count) — remember we have one.
10861 m_HasEmptyBlock =
true;
10869 void VmaBlockVector::DestroyDefragmentator()
10871 if(m_pDefragmentator != VMA_NULL)
10873 vma_delete(m_hAllocator, m_pDefragmentator);
10874 m_pDefragmentator = VMA_NULL;
// VmaBlockVector::MakePoolAllocationsLost: under the vector mutex, asks every
// block's metadata to mark stale allocations as lost for the given frame,
// summing the counts; the total is reported through the optional out-param.
10878 void VmaBlockVector::MakePoolAllocationsLost(
10879 uint32_t currentFrameIndex,
10880 size_t* pLostAllocationCount)
10882 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10883 size_t lostAllocationCount = 0;
10884 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10886 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10887 VMA_ASSERT(pBlock);
10888 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out-parameter is optional.
10890 if(pLostAllocationCount != VMA_NULL)
10892 *pLostAllocationCount = lostAllocationCount;
// VmaBlockVector::CheckCorruption: validates the corruption-detection margins
// of every block. Returns VK_ERROR_FEATURE_NOT_PRESENT when detection is
// disabled; otherwise propagates the first failing block's result (the
// early-return and final success return are elided from this excerpt).
10896 VkResult VmaBlockVector::CheckCorruption()
10898 if(!IsCorruptionDetectionEnabled())
10900 return VK_ERROR_FEATURE_NOT_PRESENT;
10903 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10904 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10906 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10907 VMA_ASSERT(pBlock);
10908 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10909 if(res != VK_SUCCESS)
// VmaBlockVector::AddStats: folds each block's allocation statistics into the
// global total, the owning memory type's bucket, and the owning heap's bucket
// of the caller's VmaStats structure.
10917 void VmaBlockVector::AddStats(
VmaStats* pStats)
10919 const uint32_t memTypeIndex = m_MemoryTypeIndex;
10920 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
10922 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10924 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10926 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10927 VMA_ASSERT(pBlock);
10928 VMA_HEAVY_ASSERT(pBlock->Validate());
// NOTE(review): allocationStatInfo's declaration is elided from this excerpt.
10930 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
10931 VmaAddStatInfo(pStats->
total, allocationStatInfo);
10932 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
10933 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// VmaDefragmentator constructor: captures the allocator / block-vector context
// and current frame index; move counters start at zero and both containers use
// the allocator's allocation callbacks. Asserts algorithm 0 (default), the
// only one this defragmentator supports.
10940 VmaDefragmentator::VmaDefragmentator(
10942 VmaBlockVector* pBlockVector,
10943 uint32_t currentFrameIndex) :
10944 m_hAllocator(hAllocator),
10945 m_pBlockVector(pBlockVector),
10946 m_CurrentFrameIndex(currentFrameIndex),
10948 m_AllocationsMoved(0),
10949 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
10950 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
10952 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: releases every per-block BlockInfo, iterating backwards.
10955 VmaDefragmentator::~VmaDefragmentator()
10957 for(
size_t i = m_Blocks.size(); i--; )
10959 vma_delete(m_hAllocator, m_Blocks[i]);
// AddAllocation: registers one allocation as a defragmentation candidate.
// pChanged (optional) is later set to VK_TRUE if the allocation is moved.
10963 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
10965 AllocationInfo allocInfo;
10966 allocInfo.m_hAllocation = hAlloc;
10967 allocInfo.m_pChanged = pChanged;
10968 m_Allocations.push_back(allocInfo);
// BlockInfo::EnsureMapping: yields a CPU pointer to the block's memory,
// preferring (in order) a mapping created by an earlier defragmentation pass,
// then the block's persistent user mapping, and only mapping fresh as a last
// resort — that fresh mapping is remembered so Unmap() can release it.
10971 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
10974 if(m_pMappedDataForDefragmentation)
10976 *ppMappedData = m_pMappedDataForDefragmentation;
// The block may already be persistently mapped by the user — reuse that.
10981 if(m_pBlock->GetMappedData())
10983 *ppMappedData = m_pBlock->GetMappedData();
// Map with reference count 1; tracked so Unmap() undoes exactly this mapping.
10988 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
10989 *ppMappedData = m_pMappedDataForDefragmentation;
// BlockInfo::Unmap: releases only a mapping EnsureMapping created itself,
// never a pre-existing persistent mapping.
10993 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
10995 if(m_pMappedDataForDefragmentation != VMA_NULL)
10997 m_pBlock->Unmap(hAllocator, 1);
// DefragmentRound: one pass of the move loop. Starting from the last block's
// last (largest, after the descending sort) allocation, it tries to re-place
// each allocation into an earlier block / lower offset, memcpy-ing the data
// through mapped pointers and updating metadata. Stops with VK_INCOMPLETE
// when either byte or allocation budget would be exceeded.
// NOTE(review): several statements (outer loop header, memcpy call site,
// index-decrement bookkeeping) are elided from this excerpt.
11001 VkResult VmaDefragmentator::DefragmentRound(
11002 VkDeviceSize maxBytesToMove,
11003 uint32_t maxAllocationsToMove)
11005 if(m_Blocks.empty())
// Cursor over (block, allocation); SIZE_MAX means "start at the block's end".
11010 size_t srcBlockIndex = m_Blocks.size() - 1;
11011 size_t srcAllocIndex = SIZE_MAX;
// Skip backwards over empty blocks to find the next source allocation.
11017 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11019 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11022 if(srcBlockIndex == 0)
11029 srcAllocIndex = SIZE_MAX;
11034 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11038 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11039 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11041 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11042 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11043 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11044 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destinations from the front of the (sorted) block list up to the
// source block itself.
11047 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11049 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11050 VmaAllocationRequest dstAllocRequest;
11051 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11052 m_CurrentFrameIndex,
11053 m_pBlockVector->GetFrameInUseCount(),
11054 m_pBlockVector->GetBufferImageGranularity(),
11061 &dstAllocRequest) &&
// Elided here: the MoveMakesSense(...) call this argument list belongs to.
11063 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11065 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: report partial progress rather than exceeding limits.
11068 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11069 (m_BytesMoved + size > maxBytesToMove))
11071 return VK_INCOMPLETE;
11074 void* pDstMappedData = VMA_NULL;
11075 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11076 if(res != VK_SUCCESS)
11081 void* pSrcMappedData = VMA_NULL;
11082 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11083 if(res != VK_SUCCESS)
// These are the arguments of the data-copy call (its name is elided).
11090 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11091 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11092 static_cast<size_t>(size));
// Re-stamp the debug magic values around the allocation's new position.
11094 if(VMA_DEBUG_MARGIN > 0)
11096 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11097 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate at destination, free at source, repoint the allocation.
11100 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11105 allocInfo.m_hAllocation);
11106 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11108 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11110 if(allocInfo.m_pChanged != VMA_NULL)
11112 *allocInfo.m_pChanged = VK_TRUE;
11115 ++m_AllocationsMoved;
11116 m_BytesMoved += size;
11118 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor (decrement logic partially elided).
11126 if(srcAllocIndex > 0)
11132 if(srcBlockIndex > 0)
11135 srcAllocIndex = SIZE_MAX;
// VmaDefragmentator::Defragment: top-level driver. Builds a BlockInfo per
// block, sorts them, distributes the registered (non-lost) allocations to
// their owning blocks via binary search, sorts allocations within each block
// by size descending, then runs up to two DefragmentRound passes and unmaps
// everything that was mapped along the way.
11145 VkResult VmaDefragmentator::Defragment(
11146 VkDeviceSize maxBytesToMove,
11147 uint32_t maxAllocationsToMove)
11149 if(m_Allocations.empty())
// One BlockInfo wrapper per device-memory block in the vector.
11155 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11156 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11158 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11159 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11160 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be matched by binary search below.
11164 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
11167 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11169 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are simply dropped from consideration.
11171 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11173 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11174 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11175 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11177 (*it)->m_Allocations.push_back(allocInfo);
11185 m_Allocations.clear();
11187 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11189 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11190 pBlockInfo->CalcHasNonMovableAllocations();
// (sic) "Descecnding" is the upstream spelling of this method name.
11191 pBlockInfo->SortAllocationsBySizeDescecnding();
// Reorder blocks so preferred move destinations come first.
11195 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Up to two rounds; stop early on VK_INCOMPLETE or failure.
11198 VkResult result = VK_SUCCESS;
11199 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11201 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any temporary mappings made by EnsureMapping during the rounds.
11205 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11207 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// MoveMakesSense: a move compacts memory only if the destination is an
// earlier block, or the same block at a lower offset. (The return statements
// between the conditions are elided from this excerpt.)
11213 bool VmaDefragmentator::MoveMakesSense(
11214 size_t dstBlockIndex, VkDeviceSize dstOffset,
11215 size_t srcBlockIndex, VkDeviceSize srcOffset)
11217 if(dstBlockIndex < srcBlockIndex)
11221 if(dstBlockIndex > srcBlockIndex)
// Same block: only moving toward a lower offset helps.
11225 if(dstOffset < srcOffset)
// VmaRecorder: CSV call-recording support, compiled only when
// VMA_RECORDING_ENABLED. Uses Windows-only APIs here (QueryPerformanceCounter,
// fopen_s) — this path is not portable as written.
11235 #if VMA_RECORDING_ENABLED 11237 VmaRecorder::VmaRecorder() :
11242 m_StartCounter(INT64_MAX)
// Init (header elided): captures settings, starts the high-resolution clock,
// opens the output file, and writes the two-line CSV header.
11248 m_UseMutex = useMutex;
11249 m_Flags = settings.
flags;
11251 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11252 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// Open for binary write; failure aborts recorder initialization.
11255 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11258 return VK_ERROR_INITIALIZATION_FAILED;
// File header: magic line plus format version "1,3".
11262 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11263 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the recording file if one was opened.
11268 VmaRecorder::~VmaRecorder()
11270 if(m_File != VMA_NULL)
// Record* methods: each appends one CSV line
// "threadId,time,frameIndex,functionName[,args...]" to the recording file,
// serialized by m_FileMutex (when m_UseMutex).
11276 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11278 CallParams callParams;
11279 GetBasicParams(callParams);
11281 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11282 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
11286 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11288 CallParams callParams;
11289 GetBasicParams(callParams);
11291 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11292 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool (signature elided): logs the pool create-info fields.
11298 CallParams callParams;
11299 GetBasicParams(callParams);
11301 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11302 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
11313 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11315 CallParams callParams;
11316 GetBasicParams(callParams);
11318 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11319 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// RecordAllocateMemory: logs memory requirements, create-info flags and the
// user-data string for a plain vmaAllocateMemory call.
11324 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11325 const VkMemoryRequirements& vkMemReq,
11329 CallParams callParams;
11330 GetBasicParams(callParams);
11332 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11333 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11334 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11336 vkMemReq.alignment,
11337 vkMemReq.memoryTypeBits,
11345 userDataStr.GetString());
// RecordAllocateMemoryForBuffer / ...ForImage: like RecordAllocateMemory but
// additionally log whether a dedicated allocation is required/preferred
// (encoded as 0/1 columns).
11349 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11350 const VkMemoryRequirements& vkMemReq,
11351 bool requiresDedicatedAllocation,
11352 bool prefersDedicatedAllocation,
11356 CallParams callParams;
11357 GetBasicParams(callParams);
11359 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11360 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11361 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11363 vkMemReq.alignment,
11364 vkMemReq.memoryTypeBits,
11365 requiresDedicatedAllocation ? 1 : 0,
11366 prefersDedicatedAllocation ? 1 : 0,
11374 userDataStr.GetString());
11378 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11379 const VkMemoryRequirements& vkMemReq,
11380 bool requiresDedicatedAllocation,
11381 bool prefersDedicatedAllocation,
11385 CallParams callParams;
11386 GetBasicParams(callParams);
11388 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11389 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11390 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11392 vkMemReq.alignment,
11393 vkMemReq.memoryTypeBits,
11394 requiresDedicatedAllocation ? 1 : 0,
11395 prefersDedicatedAllocation ? 1 : 0,
11403 userDataStr.GetString());
// Per-allocation event records: each logs one CSV line with the allocation
// handle (and, where applicable, offset/size or user data).
11407 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11410 CallParams callParams;
11411 GetBasicParams(callParams);
11413 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11414 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11419 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11421 const void* pUserData)
11423 CallParams callParams;
11424 GetBasicParams(callParams);
11426 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11427 UserDataString userDataStr(
11430 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11432 userDataStr.GetString());
11436 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11439 CallParams callParams;
11440 GetBasicParams(callParams);
11442 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11443 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11448 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11451 CallParams callParams;
11452 GetBasicParams(callParams);
11454 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11455 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
11460 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11463 CallParams callParams;
11464 GetBasicParams(callParams);
11466 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11467 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Flush/Invalidate also record the affected byte range.
11472 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11473 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11475 CallParams callParams;
11476 GetBasicParams(callParams);
11478 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11479 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
11486 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11487 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11489 CallParams callParams;
11490 GetBasicParams(callParams);
11492 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11493 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// RecordCreateBuffer / RecordCreateImage: log the full Vk*CreateInfo field set
// plus the allocation create-info, so a replay tool can re-issue the call.
11500 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11501 const VkBufferCreateInfo& bufCreateInfo,
11505 CallParams callParams;
11506 GetBasicParams(callParams);
11508 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11509 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11510 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11511 bufCreateInfo.flags,
11512 bufCreateInfo.size,
11513 bufCreateInfo.usage,
11514 bufCreateInfo.sharingMode,
11515 allocCreateInfo.
flags,
11516 allocCreateInfo.
usage,
11520 allocCreateInfo.
pool,
11522 userDataStr.GetString());
11526 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11527 const VkImageCreateInfo& imageCreateInfo,
11531 CallParams callParams;
11532 GetBasicParams(callParams);
11534 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11535 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11536 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11537 imageCreateInfo.flags,
11538 imageCreateInfo.imageType,
11539 imageCreateInfo.format,
11540 imageCreateInfo.extent.width,
11541 imageCreateInfo.extent.height,
11542 imageCreateInfo.extent.depth,
11543 imageCreateInfo.mipLevels,
11544 imageCreateInfo.arrayLayers,
11545 imageCreateInfo.samples,
11546 imageCreateInfo.tiling,
11547 imageCreateInfo.usage,
11548 imageCreateInfo.sharingMode,
11549 imageCreateInfo.initialLayout,
11550 allocCreateInfo.
flags,
11551 allocCreateInfo.
usage,
11555 allocCreateInfo.
pool,
11557 userDataStr.GetString());
// Handle-only event records (destroy/touch/query), plus the UserDataString
// constructor fragment at the end.
11561 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11564 CallParams callParams;
11565 GetBasicParams(callParams);
11567 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11568 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
11573 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11576 CallParams callParams;
11577 GetBasicParams(callParams);
11579 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11580 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
11585 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11588 CallParams callParams;
11589 GetBasicParams(callParams);
11591 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11592 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
11597 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11600 CallParams callParams;
11601 GetBasicParams(callParams);
11603 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11604 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
11609 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11612 CallParams callParams;
11613 GetBasicParams(callParams);
11615 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11616 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// UserDataString ctor fragment (header elided): when user data is a string
// (per flags, elided), keep the pointer; otherwise format the raw pointer
// value into m_PtrStr. sprintf_s is MSVC-specific.
11623 if(pUserData != VMA_NULL)
11627 m_Str = (
const char*)pUserData;
11631 sprintf_s(m_PtrStr,
"%p", pUserData);
// WriteConfiguration: dumps a "Config,Begin"…"Config,End" section describing
// the physical device, its heaps/types, the dedicated-allocation extension
// state, and the compile-time VMA_DEBUG_* macro values — enough for a replay
// tool to reproduce the environment.
11641 void VmaRecorder::WriteConfiguration(
11642 const VkPhysicalDeviceProperties& devProps,
11643 const VkPhysicalDeviceMemoryProperties& memProps,
11644 bool dedicatedAllocationExtensionEnabled)
11646 fprintf(m_File,
"Config,Begin\n");
11648 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11649 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11650 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11651 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11652 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11653 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that affect VMA's placement decisions.
11655 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11656 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11657 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps, then memory types.
11659 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11660 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11662 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11663 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
11665 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11666 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11668 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11669 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11672 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration macros, normalized to integers.
11674 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11675 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11676 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11677 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11678 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11679 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11680 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11681 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11682 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11684 fprintf(m_File,
"Config,End\n");
// GetBasicParams: fills the caller's thread id and elapsed time in seconds
// (QPC ticks since Init, divided by the counter frequency). Windows-only.
11687 void VmaRecorder::GetBasicParams(CallParams& outParams)
11689 outParams.threadId = GetCurrentThreadId();
11691 LARGE_INTEGER counter;
11692 QueryPerformanceCounter(&counter);
11693 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flush: body elided from this excerpt (presumably fflush of m_File — TODO confirm).
11696 void VmaRecorder::Flush()
// VmaAllocator_T constructor (its opening lines are elided before this
// excerpt): copies creation parameters, validates debug-macro consistency,
// zeroes the member arrays, imports Vulkan function pointers, queries device
// properties, applies per-heap size limits, creates one block vector and one
// dedicated-allocation list per memory type, and optionally starts recording.
11704 #endif // #if VMA_RECORDING_ENABLED 11712 m_hDevice(pCreateInfo->device),
11713 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11714 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11715 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11716 m_PreferredLargeHeapBlockSize(0),
11717 m_PhysicalDevice(pCreateInfo->physicalDevice),
11718 m_CurrentFrameIndex(0),
11719 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11722 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values into the margin, so the
// margin must be a multiple of 4 bytes.
11725 if(VMA_DEBUG_DETECT_CORRUPTION)
11728 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but the extension support was compiled out.
11733 #if !(VMA_DEDICATED_ALLOCATION) 11736 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
11740 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11741 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11742 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11744 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11745 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap limit.
11747 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11749 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
11760 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11761 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity: alignment/granularity values must be powers of two.
11763 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11764 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11765 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11766 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply caller-provided per-heap size limits, clamping the reported heap size.
11773 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
11775 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
11776 if(limit != VK_WHOLE_SIZE)
11778 m_HeapSizeLimit[heapIndex] = limit;
11779 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
11781 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + one dedicated-allocation list per memory type.
11787 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11789 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
11791 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11794 preferredBlockSize,
11797 GetBufferImageGranularity(),
11804 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
11811 VkResult res = VK_SUCCESS;
// Optional call recording; hard-errors if requested without support compiled in.
11816 #if VMA_RECORDING_ENABLED 11817 m_pRecorder = vma_new(
this, VmaRecorder)();
11819 if(res != VK_SUCCESS)
11823 m_pRecorder->WriteConfiguration(
11824 m_PhysicalDeviceProperties,
11826 m_UseKhrDedicatedAllocation);
11827 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
11829 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
11830 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records the destroy event and deletes the recorder (if
// recording), asserts all pools were destroyed by the user, then frees the
// per-memory-type dedicated-allocation lists and block vectors (backwards).
11837 VmaAllocator_T::~VmaAllocator_T()
11839 #if VMA_RECORDING_ENABLED 11840 if(m_pRecorder != VMA_NULL)
11842 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
11843 vma_delete(
this, m_pRecorder);
// User must destroy all custom pools before the allocator.
11847 VMA_ASSERT(m_Pools.empty());
11849 for(
size_t i = GetMemoryTypeCount(); i--; )
11851 vma_delete(
this, m_pDedicatedAllocations[i]);
11852 vma_delete(
this, m_pBlockVectors[i]);
// ImportVulkanFunctions: fills m_VulkanFunctions in three stages —
// 1) static linking against the Vulkan loader (when VMA_STATIC_VULKAN_FUNCTIONS),
//    plus vkGetDeviceProcAddr lookups for the KHR dedicated-allocation entry
//    points; 2) caller-supplied overrides (any non-null member wins);
// 3) asserts that every required pointer ended up non-null.
11856 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
11858 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11859 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
11860 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
11861 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
11862 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
11863 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
11864 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
11865 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
11866 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
11867 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
11868 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
11869 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
11870 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
11871 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
11872 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
11873 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
11874 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR extension entry points are device-level: fetched via vkGetDeviceProcAddr.
11875 #if VMA_DEDICATED_ALLOCATION 11876 if(m_UseKhrDedicatedAllocation)
11878 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
11879 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11880 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11881 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Caller overrides: copy each non-null pointer over the defaults.
11883 #endif // #if VMA_DEDICATED_ALLOCATION 11884 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11886 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11887 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11889 if(pVulkanFunctions != VMA_NULL)
11891 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11892 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11893 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11894 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11895 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11896 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11897 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11898 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11899 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11900 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11901 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11902 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11903 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11904 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11905 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11906 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11907 #if VMA_DEDICATED_ALLOCATION 11908 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11909 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function pointer must be set by now.
11913 #undef VMA_COPY_IF_NOT_NULL 11917 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
11918 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
11919 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
11920 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
11921 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
11922 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
11923 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
11924 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
11925 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
11926 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
11927 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
11928 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
11929 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
11930 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
11931 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
11932 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The 2KHR pointers are only required when the extension is actually in use.
11933 #if VMA_DEDICATED_ALLOCATION 11934 if(m_UseKhrDedicatedAllocation)
11936 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
11937 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// CalcPreferredBlockSize: heuristic block size for a memory type — 1/8 of the
// heap for "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise the
// configured preferred large-heap block size.
11942 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
11944 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
11945 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
11946 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
11947 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// AllocateMemoryOfType: allocates from a specific memory type. Prefers a
// dedicated VkDeviceMemory when the caller asked for it, debug forces it, or
// the size exceeds half the preferred block size; otherwise sub-allocates
// from the type's block vector, falling back to dedicated memory on failure.
// NOTE(review): several parameters/branches are elided from this excerpt.
11950 VkResult VmaAllocator_T::AllocateMemoryOfType(
11952 VkDeviceSize alignment,
11953 bool dedicatedAllocation,
11954 VkBuffer dedicatedBuffer,
11955 VkImage dedicatedImage,
11957 uint32_t memTypeIndex,
11958 VmaSuballocationType suballocType,
11961 VMA_ASSERT(pAllocation != VMA_NULL);
11962 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Guard (condition partially elided): mapping requires a HOST_VISIBLE type.
11968 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
11973 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
11974 VMA_ASSERT(blockVector);
11976 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
11977 bool preferDedicatedMemory =
11978 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
11979 dedicatedAllocation ||
// Huge allocations (> half a block) get their own VkDeviceMemory.
11981 size > preferredBlockSize / 2;
// Dedicated path only applies to the default pools (not custom VmaPool).
11983 if(preferDedicatedMemory &&
11985 finalCreateInfo.
pool == VK_NULL_HANDLE)
11994 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11998 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector.
12012 VkResult res = blockVector->Allocate(
12014 m_CurrentFrameIndex.load(),
12020 if(res == VK_SUCCESS)
12028 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block-vector allocation failed: fall back to a dedicated allocation.
12032 res = AllocateDedicatedMemory(
12038 finalCreateInfo.pUserData,
12042 if(res == VK_SUCCESS)
12045 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12051 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// AllocateDedicatedMemory: creates a whole VkDeviceMemory for one allocation,
// optionally chaining VkMemoryDedicatedAllocateInfoKHR (buffer XOR image) and
// persistently mapping it; registers the result in the sorted per-type
// dedicated-allocations list.
12058 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12060 VmaSuballocationType suballocType,
12061 uint32_t memTypeIndex,
12063 bool isUserDataString,
12065 VkBuffer dedicatedBuffer,
12066 VkImage dedicatedImage,
12069 VMA_ASSERT(pAllocation);
12071 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12072 allocInfo.memoryTypeIndex = memTypeIndex;
12073 allocInfo.allocationSize = size;
// Chain the KHR dedicated-allocation info; buffer and image are exclusive.
12075 #if VMA_DEDICATED_ALLOCATION 12076 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12077 if(m_UseKhrDedicatedAllocation)
12079 if(dedicatedBuffer != VK_NULL_HANDLE)
12081 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12082 dedicatedAllocInfo.buffer = dedicatedBuffer;
12083 allocInfo.pNext = &dedicatedAllocInfo;
12085 else if(dedicatedImage != VK_NULL_HANDLE)
12087 dedicatedAllocInfo.image = dedicatedImage;
12088 allocInfo.pNext = &dedicatedAllocInfo;
12091 #endif // #if VMA_DEDICATED_ALLOCATION 12094 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12095 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12098 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping (the guarding condition is elided); on map
// failure the fresh device memory is released before returning.
12102 void* pMappedData = VMA_NULL;
12105 res = (*m_VulkanFunctions.vkMapMemory)(
12114 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12115 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and attach the user data.
12120 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12121 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12122 (*pAllocation)->SetUserData(
this, pUserData);
12123 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12125 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Track in the per-type sorted list so FreeDedicatedMemory can find it.
12130 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12131 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12132 VMA_ASSERT(pDedicatedAllocations);
12133 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12136 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// GetBufferMemoryRequirements: queries buffer memory requirements. With the
// KHR dedicated-allocation extension enabled it uses
// vkGetBufferMemoryRequirements2KHR and reports the dedicated-allocation
// hints; otherwise it falls back to the core call and reports false/false.
12141 void VmaAllocator_T::GetBufferMemoryRequirements(
12143 VkMemoryRequirements& memReq,
12144 bool& requiresDedicatedAllocation,
12145 bool& prefersDedicatedAllocation)
const 12147 #if VMA_DEDICATED_ALLOCATION 12148 if(m_UseKhrDedicatedAllocation)
12150 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12151 memReqInfo.buffer = hBuffer;
12153 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
// Chain the dedicated-requirements struct so the driver fills its hints.
12155 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12156 memReq2.pNext = &memDedicatedReq;
12158 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12160 memReq = memReq2.memoryRequirements;
12161 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12162 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query, no dedicated-allocation information.
12165 #endif // #if VMA_DEDICATED_ALLOCATION 12167 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12168 requiresDedicatedAllocation =
false;
12169 prefersDedicatedAllocation =
false;
12173 void VmaAllocator_T::GetImageMemoryRequirements(
12175 VkMemoryRequirements& memReq,
12176 bool& requiresDedicatedAllocation,
12177 bool& prefersDedicatedAllocation)
const 12179 #if VMA_DEDICATED_ALLOCATION 12180 if(m_UseKhrDedicatedAllocation)
12182 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12183 memReqInfo.image = hImage;
12185 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12187 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12188 memReq2.pNext = &memDedicatedReq;
12190 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12192 memReq = memReq2.memoryRequirements;
12193 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12194 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
12197 #endif // #if VMA_DEDICATED_ALLOCATION 12199 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12200 requiresDedicatedAllocation =
false;
12201 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates mutually-exclusive creation
// flags, then routes the request either to an explicit VmaPool's block
// vector or to AllocateMemoryOfType over the memory types permitted by
// vkMemReq.memoryTypeBits, retrying with the next candidate type on failure.
// NOTE(review): this listing lost lines (braces, some conditions, some
// arguments) during extraction; comments describe only visible statements.
12205 VkResult VmaAllocator_T::AllocateMemory(
12206 const VkMemoryRequirements& vkMemReq,
12207 bool requiresDedicatedAllocation,
12208 bool prefersDedicatedAllocation,
12209 VkBuffer dedicatedBuffer,
12210 VkImage dedicatedImage,
12212 VmaSuballocationType suballocType,
// Vulkan guarantees alignment is a power of two; assert the precondition.
12215 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Reject contradictory flag combinations up front (debug assert + error).
12220 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12221 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12226 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12227 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation cannot honor NEVER_ALLOCATE or a
// custom pool — both are reported as errors.
12229 if(requiresDedicatedAllocation)
12233 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12234 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12236 if(createInfo.
pool != VK_NULL_HANDLE)
12238 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12239 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12242 if((createInfo.
pool != VK_NULL_HANDLE) &&
12245 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12246 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: honor both the resource alignment and the minimum
// alignment required by the pool's memory type, then delegate entirely.
12249 if(createInfo.
pool != VK_NULL_HANDLE)
12251 const VkDeviceSize alignmentForPool = VMA_MAX(
12252 vkMemReq.alignment,
12253 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12254 return createInfo.
pool->m_BlockVector.Allocate(
12256 m_CurrentFrameIndex.load(),
// Default path: iterate over acceptable memory types. Bits are cleared from
// memoryTypeBits as candidates fail, so the next-best type is tried.
12266 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12267 uint32_t memTypeIndex = UINT32_MAX;
12269 if(res == VK_SUCCESS)
12271 VkDeviceSize alignmentForMemType = VMA_MAX(
12272 vkMemReq.alignment,
12273 GetMemoryTypeMinAlignment(memTypeIndex));
12275 res = AllocateMemoryOfType(
12277 alignmentForMemType,
// Dedicated allocation is used when required by the driver or merely
// preferred (e.g. large render targets).
12278 requiresDedicatedAllocation || prefersDedicatedAllocation,
12286 if(res == VK_SUCCESS)
// Allocation from this memory type failed — exclude it and retry.
12296 memoryTypeBits &= ~(1u << memTypeIndex);
12299 if(res == VK_SUCCESS)
12301 alignmentForMemType = VMA_MAX(
12302 vkMemReq.alignment,
12303 GetMemoryTypeMinAlignment(memTypeIndex));
12305 res = AllocateMemoryOfType(
12307 alignmentForMemType,
12308 requiresDedicatedAllocation || prefersDedicatedAllocation,
12316 if(res == VK_SUCCESS)
// No remaining memory type could satisfy the request.
12326 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its owning block vector (custom pool or default
// per-memory-type vector) or frees its dedicated VkDeviceMemory, then
// destroys the VmaAllocation_T object itself.
// NOTE(review): braces/else lines were lost in extraction; comments describe
// only the visible statements.
12337 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12339 VMA_ASSERT(allocation);
// TouchAllocation() returns whether the allocation is still valid (not lost)
// — debug pattern filling is only done for live allocations.
12341 if(TouchAllocation(allocation))
12343 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
// Overwrite freed memory with a recognizable pattern to catch use-after-free.
12345 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12348 switch(allocation->GetType())
12350 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12352 VmaBlockVector* pBlockVector = VMA_NULL;
12353 VmaPool hPool = allocation->GetPool();
// Sub-allocations return to their custom pool's block vector when one is
// set, otherwise to the default vector for the allocation's memory type.
12354 if(hPool != VK_NULL_HANDLE)
12356 pBlockVector = &hPool->m_BlockVector;
12360 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12361 pBlockVector = m_pBlockVectors[memTypeIndex];
12363 pBlockVector->Free(allocation);
12366 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12367 FreeDedicatedMemory(allocation);
// Clear user data (releases the copied string when applicable) before
// destroying the allocation object.
12374 allocation->SetUserData(
this, VMA_NULL);
12375 vma_delete(
this, allocation);
// Aggregates allocator-wide statistics into pStats: zero-initializes the
// output, sums stats from default block vectors, custom pools, and dedicated
// allocations, then post-processes derived values (averages etc.).
// NOTE(review): loop bodies/braces were partially lost in extraction.
12378 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset total, per-memory-type, and per-heap buckets before accumulating.
12381 InitStatInfo(pStats->
total);
12382 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12384 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
12388 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12390 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12391 VMA_ASSERT(pBlockVector);
12392 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
12397 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12398 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12400 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own mutex.
12405 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12407 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12408 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12409 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12410 VMA_ASSERT(pDedicatedAllocVector);
12411 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12414 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
// Each dedicated allocation contributes to the total, its memory type,
// and its memory heap.
12415 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12416 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12417 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Compute derived statistics (e.g. averages) for every bucket.
12422 VmaPostprocessCalcStatInfo(pStats->
total);
12423 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12424 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12425 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12426 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
12429 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given allocations: registers each eligible allocation with
// a per-block-vector defragmentator, runs defragmentation on default vectors
// and custom pools, then tears the defragmentators down.
// NOTE(review): braces/else branches and some parameter lines were lost in
// extraction; comments describe only the visible statements.
12431 VkResult VmaAllocator_T::Defragment(
12433 size_t allocationCount,
12434 VkBool32* pAllocationsChanged,
// Zero the caller-visible outputs so untouched entries read as "unchanged".
12438 if(pAllocationsChanged != VMA_NULL)
12440 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
12442 if(pDefragmentationStats != VMA_NULL)
12444 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12447 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Pools are locked for the whole operation so the set cannot change.
12449 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12451 const size_t poolCount = m_Pools.size();
// Phase 1: dispatch each candidate allocation to its block vector's
// defragmentator.
12454 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12457 VMA_ASSERT(hAlloc);
12458 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block (sub-)allocations in HOST_VISIBLE|HOST_COHERENT memory that are
// not lost can be moved by this CPU-side defragmentation.
12460 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12461 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12463 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12465 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12467 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12469 const VmaPool hAllocPool = hAlloc->GetPool();
12471 if(hAllocPool != VK_NULL_HANDLE)
// Only pools using the default (generic) algorithm support defragmentation.
12474 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12476 pAllocBlockVector = &hAllocPool->m_BlockVector;
12482 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12485 if(pAllocBlockVector != VMA_NULL)
12487 VmaDefragmentator*
const pDefragmentator =
12488 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12489 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12490 &pAllocationsChanged[allocIndex] : VMA_NULL;
12491 pDefragmentator->AddAllocation(hAlloc, pChanged);
12496 VkResult result = VK_SUCCESS;
// Phase 2: run defragmentation with optional limits from the caller.
// NOTE(review): SIZE_MAX assigned to a VkDeviceSize — on 32-bit targets this
// is smaller than the 64-bit "unlimited" one might expect; verify intent.
12500 VkDeviceSize maxBytesToMove = SIZE_MAX;
12501 uint32_t maxAllocationsToMove = UINT32_MAX;
12502 if(pDefragmentationInfo != VMA_NULL)
12509 for(uint32_t memTypeIndex = 0;
12510 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
// Only HOST_VISIBLE memory types can be defragmented from the CPU.
12514 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12516 result = m_pBlockVectors[memTypeIndex]->Defragment(
12517 pDefragmentationStats,
12519 maxAllocationsToMove);
12524 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12526 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12527 pDefragmentationStats,
12529 maxAllocationsToMove);
// Phase 3: destroy defragmentators (reverse order of creation).
12535 for(
size_t poolIndex = poolCount; poolIndex--; )
12537 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12541 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12543 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12545 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (the signature line was lost in
// extraction): fills *pAllocationInfo and, for lost-capable allocations,
// atomically bumps the last-use frame index via a CAS retry loop.
12554 if(hAllocation->CanBecomeLost())
12560 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12561 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report zeroed handles/offset but keep size/user data.
12564 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12568 pAllocationInfo->
offset = 0;
12569 pAllocationInfo->
size = hAllocation->GetSize();
12571 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full, live parameters.
12574 else if(localLastUseFrameIndex == localCurrFrameIndex)
12576 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12577 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12578 pAllocationInfo->
offset = hAllocation->GetOffset();
12579 pAllocationInfo->
size = hAllocation->GetSize();
12581 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance last-use to the current frame; on CAS failure the
// loop re-reads and retries.
12586 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12588 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: frame index is only maintained for the stats
// string, hence the conditional compilation.
12595 #if VMA_STATS_STRING_ENABLED 12596 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12597 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12600 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12601 if(localLastUseFrameIndex == localCurrFrameIndex)
12607 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12609 localLastUseFrameIndex = localCurrFrameIndex;
// Report all parameters, including the persistent mapping pointer.
12615 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12616 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12617 pAllocationInfo->
offset = hAllocation->GetOffset();
12618 pAllocationInfo->
size = hAllocation->GetSize();
12619 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12620 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Returns whether hAllocation is still valid (not lost) and, as a side
// effect, advances its last-use frame index to the current frame using a
// lock-free compare-exchange retry loop. Stripped-down sibling of
// GetAllocationInfo that fills no output structure.
// NOTE(review): loop constructs/braces were lost in extraction; comments
// describe only the visible statements.
12624 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12627 if(hAllocation->CanBecomeLost())
12629 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12630 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost — caller must treat the allocation as invalid.
12633 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Already touched during this frame — nothing to update.
12637 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS the last-use index forward; on contention re-read and retry.
12643 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12645 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path: the frame index is maintained only when statistics
// strings are enabled.
12652 #if VMA_STATS_STRING_ENABLED 12653 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12654 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12657 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12658 if(localLastUseFrameIndex == localCurrFrameIndex)
12664 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12666 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and parameter-validation
// lines were lost in extraction): constructs a VmaPool_T, pre-creates its
// minimum block count, assigns a unique id, and registers it in m_Pools.
12678 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Invalid create info path (condition elided in this listing).
12688 return VK_ERROR_INITIALIZATION_FAILED;
12691 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12693 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Eagerly create minBlockCount blocks; roll back the pool object on failure.
12695 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12696 if(res != VK_SUCCESS)
12698 vma_delete(
this, *pPool);
// Register the new pool under the pools mutex, keeping m_Pools sorted.
12705 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12706 (*pPool)->SetId(m_NextPoolId++);
12707 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
12713 void VmaAllocator_T::DestroyPool(
VmaPool pool)
12717 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12718 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
12719 VMA_ASSERT(success &&
"Pool not found in Allocator.");
12722 vma_delete(
this, pool);
12727 pool->m_BlockVector.GetPoolStats(pPoolStats);
12730 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
12732 m_CurrentFrameIndex.store(frameIndex);
12735 void VmaAllocator_T::MakePoolAllocationsLost(
12737 size_t* pLostAllocationCount)
12739 hPool->m_BlockVector.MakePoolAllocationsLost(
12740 m_CurrentFrameIndex.load(),
12741 pLostAllocationCount);
12744 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
12746 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over every default block vector and custom pool
// whose memory type is selected in memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once at least one
// vector actually supported and passed the check.
// NOTE(review): switch statements and several cases were lost in extraction;
// comments describe only the visible statements.
12749 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
12751 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
12754 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12756 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
12758 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12759 VMA_ASSERT(pBlockVector);
12760 VkResult localRes = pBlockVector->CheckCorruption();
// FEATURE_NOT_PRESENT from a vector leaves the aggregate unchanged.
12763 case VK_ERROR_FEATURE_NOT_PRESENT:
12766 finalRes = VK_SUCCESS;
// Custom pools, guarded by the pools mutex.
12776 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12777 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12779 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
12781 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
12784 case VK_ERROR_FEATURE_NOT_PRESENT:
12787 finalRes = VK_SUCCESS;
12799 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
12801 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
12802 (*pAllocation)->InitLost();
12805 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
12807 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
12810 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12812 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12813 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
12815 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12816 if(res == VK_SUCCESS)
12818 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
12823 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
12828 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12831 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
12833 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
12839 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
12841 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
12843 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
12846 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
12848 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
12849 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12851 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12852 m_HeapSizeLimit[heapIndex] += size;
12856 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
12858 if(hAllocation->CanBecomeLost())
12860 return VK_ERROR_MEMORY_MAP_FAILED;
12863 switch(hAllocation->GetType())
12865 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12867 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12868 char *pBytes = VMA_NULL;
12869 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
12870 if(res == VK_SUCCESS)
12872 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
12873 hAllocation->BlockAllocMap();
12877 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12878 return hAllocation->DedicatedAllocMap(
this, ppData);
12881 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature line lost in extraction):
// reverses a prior Map() call, dispatching on the allocation kind.
12887 switch(hAllocation->GetType())
12889 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12891 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
// Drop this allocation's map reference, then the block's (ref-counted).
12892 hAllocation->BlockAllocUnmap();
12893 pBlock->Unmap(
this, 1);
12896 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12897 hAllocation->DedicatedAllocUnmap(
this);
12904 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12906 VkResult res = VK_SUCCESS;
12907 switch(hAllocation->GetType())
12909 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12910 res = GetVulkanFunctions().vkBindBufferMemory(
12913 hAllocation->GetMemory(),
12916 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12918 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12919 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
12920 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
12929 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
12931 VkResult res = VK_SUCCESS;
12932 switch(hAllocation->GetType())
12934 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12935 res = GetVulkanFunctions().vkBindImageMemory(
12938 hAllocation->GetMemory(),
12941 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12943 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12944 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
12945 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a byte range of hAllocation on non-coherent memory
// types, expanding the range to nonCoherentAtomSize boundaries as the Vulkan
// spec requires for vkFlush/InvalidateMappedMemoryRanges. Coherent memory
// types need no explicit flush, so the body is skipped for them.
// NOTE(review): braces/else/default lines were lost in extraction; comments
// describe only the visible statements.
12954 void VmaAllocator_T::FlushOrInvalidateAllocation(
12956 VkDeviceSize offset, VkDeviceSize size,
12957 VMA_CACHE_OPERATION op)
12959 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
12960 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
12962 const VkDeviceSize allocationSize = hAllocation->GetSize();
12963 VMA_ASSERT(offset <= allocationSize);
12965 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12967 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12968 memRange.memory = hAllocation->GetMemory();
12970 switch(hAllocation->GetType())
12972 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: align the start down; the size either runs to the end of the
// allocation (VK_WHOLE_SIZE) or is aligned up and clamped to it.
12973 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12974 if(size == VK_WHOLE_SIZE)
12976 memRange.size = allocationSize - memRange.offset;
12980 VMA_ASSERT(offset + size <= allocationSize);
12981 memRange.size = VMA_MIN(
12982 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
12983 allocationSize - memRange.offset);
12987 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block sub-allocation: compute the aligned range relative to the
// sub-allocation first...
12990 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12991 if(size == VK_WHOLE_SIZE)
12993 size = allocationSize - offset;
12997 VMA_ASSERT(offset + size <= allocationSize);
12999 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate it into block coordinates and clamp to the block size.
// The sub-allocation offset is expected to already be atom-aligned.
13002 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13003 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13004 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13005 memRange.offset += allocationOffset;
13006 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the requested cache operation over the computed range.
13017 case VMA_CACHE_FLUSH:
13018 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13020 case VMA_CACHE_INVALIDATE:
13021 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
13030 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13032 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13034 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13036 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13037 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13038 VMA_ASSERT(pDedicatedAllocations);
13039 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13040 VMA_ASSERT(success);
13043 VkDeviceMemory hMemory = allocation->GetMemory();
13045 if(allocation->GetMappedData() != VMA_NULL)
13047 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
13050 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13052 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
13055 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13057 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13058 !hAllocation->CanBecomeLost() &&
13059 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13061 void* pData = VMA_NULL;
13062 VkResult res = Map(hAllocation, &pData);
13063 if(res == VK_SUCCESS)
13065 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
13066 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13067 Unmap(hAllocation);
13071 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes a detailed JSON snapshot of the allocator: dedicated allocations
// per memory type, default block vectors per memory type, and custom pools
// keyed by their numeric id. Only compiled when stats strings are enabled.
// NOTE(review): braces and several json.EndObject()/EndString() lines were
// lost in extraction; comments describe only the visible statements.
13076 #if VMA_STATS_STRING_ENABLED 13078 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: "DedicatedAllocations" — opened lazily, only if any exist.
13080 bool dedicatedAllocationsStarted =
false;
13081 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13083 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13084 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13085 VMA_ASSERT(pDedicatedAllocVector);
13086 if(pDedicatedAllocVector->empty() ==
false)
13088 if(dedicatedAllocationsStarted ==
false)
13090 dedicatedAllocationsStarted =
true;
13091 json.WriteString(
"DedicatedAllocations");
13092 json.BeginObject();
// Per-memory-type key, e.g. "Type 3".
13095 json.BeginString(
"Type ");
13096 json.ContinueString(memTypeIndex);
13101 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13103 json.BeginObject(
true);
13105 hAlloc->PrintParameters(json);
13112 if(dedicatedAllocationsStarted)
// Section 2: "DefaultPools" — the per-memory-type block vectors, again
// opened lazily on first non-empty vector.
13118 bool allocationsStarted =
false;
13119 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13121 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13123 if(allocationsStarted ==
false)
13125 allocationsStarted =
true;
13126 json.WriteString(
"DefaultPools");
13127 json.BeginObject();
13130 json.BeginString(
"Type ");
13131 json.ContinueString(memTypeIndex);
13134 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13137 if(allocationsStarted)
// Section 3: "Pools" — custom pools keyed by their unique id, under the
// pools mutex.
13145 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13146 const size_t poolCount = m_Pools.size();
13149 json.WriteString(
"Pools");
13150 json.BeginObject();
13151 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13153 json.BeginString();
13154 json.ContinueString(m_Pools[poolIndex]->GetId());
13157 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// End of the stats-string-only section, followed by the body of the public
// vmaCreateAllocator entry point (its signature and the vma_new construction
// line were lost in extraction): validates arguments and initializes the
// newly constructed allocator.
13164 #endif // #if VMA_STATS_STRING_ENABLED 13173 VMA_ASSERT(pCreateInfo && pAllocator);
13174 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13176 return (*pAllocator)->Init(pCreateInfo);
// Body of vmaDestroyAllocator (signature lost in extraction): destroying a
// VK_NULL_HANDLE allocator is a no-op. The allocation callbacks are copied
// out first because vma_delete destroys the object that owns them.
13182 if(allocator != VK_NULL_HANDLE)
13184 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13185 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13186 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties (opening of signature lost in extraction):
// hands out a pointer to the allocator's cached VkPhysicalDeviceProperties.
13192 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13194 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13195 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties (opening of signature lost in extraction): hands
// out a pointer to the allocator's cached VkPhysicalDeviceMemoryProperties.
13200 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13202 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13203 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties (opening of signature lost in extraction):
// returns the property flags of one memory type after bounds-checking the
// index against the device's memory type count.
13208 uint32_t memoryTypeIndex,
13209 VkMemoryPropertyFlags* pFlags)
13211 VMA_ASSERT(allocator && pFlags);
13212 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13213 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex (opening of signature lost in extraction):
// validates and forwards the frame index to the allocator. The reserved
// VMA_FRAME_INDEX_LOST sentinel is rejected in debug builds.
13218 uint32_t frameIndex)
13220 VMA_ASSERT(allocator);
13221 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13223 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13225 allocator->SetCurrentFrameIndex(frameIndex);
// Body of vmaCalculateStats (signature lost in extraction): validates
// arguments and delegates to VmaAllocator_T::CalculateStats under the
// optional global debug mutex.
13232 VMA_ASSERT(allocator && pStats);
13233 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13234 allocator->CalculateStats(pStats);
// vmaBuildStatsString (opening of signature lost in extraction): serializes
// allocator statistics to a JSON string. The caller receives a heap string
// in *ppStatsString and must release it with vmaFreeStatsString.
// NOTE(review): braces and json.End*() calls were lost in extraction;
// comments describe only the visible statements.
13237 #if VMA_STATS_STRING_ENABLED 13241 char** ppStatsString,
13242 VkBool32 detailedMap)
13244 VMA_ASSERT(allocator && ppStatsString);
13245 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13247 VmaStringBuilder sb(allocator);
13249 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13250 json.BeginObject();
// Aggregate statistics first; they feed the "Total" and per-heap sections.
13253 allocator->CalculateStats(&stats);
13255 json.WriteString(
"Total");
13256 VmaPrintStatInfo(json, stats.
total);
// One object per memory heap: size, flags, stats, and its memory types.
13258 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13260 json.BeginString(
"Heap ");
13261 json.ContinueString(heapIndex);
13263 json.BeginObject();
13265 json.WriteString(
"Size");
13266 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13268 json.WriteString(
"Flags");
13269 json.BeginArray(
true);
13270 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13272 json.WriteString(
"DEVICE_LOCAL");
13278 json.WriteString(
"Stats");
13279 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nest each memory type under the heap it belongs to.
13282 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13284 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13286 json.BeginString(
"Type ");
13287 json.ContinueString(typeIndex);
13290 json.BeginObject();
13292 json.WriteString(
"Flags");
13293 json.BeginArray(
true);
// Emit one string per set VkMemoryPropertyFlag bit.
13294 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13295 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13297 json.WriteString(
"DEVICE_LOCAL");
13299 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13301 json.WriteString(
"HOST_VISIBLE");
13303 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13305 json.WriteString(
"HOST_COHERENT");
13307 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13309 json.WriteString(
"HOST_CACHED");
13311 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13313 json.WriteString(
"LAZILY_ALLOCATED");
13319 json.WriteString(
"Stats");
13320 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional deep dump of every block and allocation.
13329 if(detailedMap == VK_TRUE)
13331 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a NUL-terminated heap string owned by the
// caller (freed via vmaFreeStatsString with matching length).
13337 const size_t len = sb.GetLength();
13338 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13341 memcpy(pChars, sb.GetData(), len);
13343 pChars[len] =
'\0';
13344 *ppStatsString = pChars;
13349 char* pStatsString)
13351 if(pStatsString != VMA_NULL)
13353 VMA_ASSERT(allocator);
13354 size_t len = strlen(pStatsString);
13355 vma_delete_array(allocator, pStatsString, len + 1);
13359 #endif // #if VMA_STATS_STRING_ENABLED 13366 uint32_t memoryTypeBits,
13368 uint32_t* pMemoryTypeIndex)
13370 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13371 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13372 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13379 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13380 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13385 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13389 switch(pAllocationCreateInfo->
usage)
13394 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13396 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13400 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13403 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13404 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13406 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13410 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13411 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13417 *pMemoryTypeIndex = UINT32_MAX;
13418 uint32_t minCost = UINT32_MAX;
13419 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13420 memTypeIndex < allocator->GetMemoryTypeCount();
13421 ++memTypeIndex, memTypeBit <<= 1)
13424 if((memTypeBit & memoryTypeBits) != 0)
13426 const VkMemoryPropertyFlags currFlags =
13427 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13429 if((requiredFlags & ~currFlags) == 0)
13432 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13434 if(currCost < minCost)
13436 *pMemoryTypeIndex = memTypeIndex;
13441 minCost = currCost;
13446 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13451 const VkBufferCreateInfo* pBufferCreateInfo,
13453 uint32_t* pMemoryTypeIndex)
13455 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13456 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13457 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13458 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13460 const VkDevice hDev = allocator->m_hDevice;
13461 VkBuffer hBuffer = VK_NULL_HANDLE;
13462 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13463 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13464 if(res == VK_SUCCESS)
13466 VkMemoryRequirements memReq = {};
13467 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13468 hDev, hBuffer, &memReq);
13472 memReq.memoryTypeBits,
13473 pAllocationCreateInfo,
13476 allocator->GetVulkanFunctions().vkDestroyBuffer(
13477 hDev, hBuffer, allocator->GetAllocationCallbacks());
13484 const VkImageCreateInfo* pImageCreateInfo,
13486 uint32_t* pMemoryTypeIndex)
13488 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13489 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13490 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13491 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13493 const VkDevice hDev = allocator->m_hDevice;
13494 VkImage hImage = VK_NULL_HANDLE;
13495 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13496 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13497 if(res == VK_SUCCESS)
13499 VkMemoryRequirements memReq = {};
13500 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13501 hDev, hImage, &memReq);
13505 memReq.memoryTypeBits,
13506 pAllocationCreateInfo,
13509 allocator->GetVulkanFunctions().vkDestroyImage(
13510 hDev, hImage, allocator->GetAllocationCallbacks());
13520 VMA_ASSERT(allocator && pCreateInfo && pPool);
13522 VMA_DEBUG_LOG(
"vmaCreatePool");
13524 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13526 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13528 #if VMA_RECORDING_ENABLED 13529 if(allocator->GetRecorder() != VMA_NULL)
13531 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13542 VMA_ASSERT(allocator);
13544 if(pool == VK_NULL_HANDLE)
13549 VMA_DEBUG_LOG(
"vmaDestroyPool");
13551 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13553 #if VMA_RECORDING_ENABLED 13554 if(allocator->GetRecorder() != VMA_NULL)
13556 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13560 allocator->DestroyPool(pool);
13568 VMA_ASSERT(allocator && pool && pPoolStats);
13570 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13572 allocator->GetPoolStats(pool, pPoolStats);
13578 size_t* pLostAllocationCount)
13580 VMA_ASSERT(allocator && pool);
13582 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13584 #if VMA_RECORDING_ENABLED 13585 if(allocator->GetRecorder() != VMA_NULL)
13587 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13591 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13596 VMA_ASSERT(allocator && pool);
13598 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13600 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13602 return allocator->CheckPoolCorruption(pool);
13607 const VkMemoryRequirements* pVkMemoryRequirements,
13612 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13614 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13616 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13618 VkResult result = allocator->AllocateMemory(
13619 *pVkMemoryRequirements,
13625 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13628 #if VMA_RECORDING_ENABLED 13629 if(allocator->GetRecorder() != VMA_NULL)
13631 allocator->GetRecorder()->RecordAllocateMemory(
13632 allocator->GetCurrentFrameIndex(),
13633 *pVkMemoryRequirements,
13639 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13641 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13654 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13656 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13658 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13660 VkMemoryRequirements vkMemReq = {};
13661 bool requiresDedicatedAllocation =
false;
13662 bool prefersDedicatedAllocation =
false;
13663 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
13664 requiresDedicatedAllocation,
13665 prefersDedicatedAllocation);
13667 VkResult result = allocator->AllocateMemory(
13669 requiresDedicatedAllocation,
13670 prefersDedicatedAllocation,
13674 VMA_SUBALLOCATION_TYPE_BUFFER,
13677 #if VMA_RECORDING_ENABLED 13678 if(allocator->GetRecorder() != VMA_NULL)
13680 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
13681 allocator->GetCurrentFrameIndex(),
13683 requiresDedicatedAllocation,
13684 prefersDedicatedAllocation,
13690 if(pAllocationInfo && result == VK_SUCCESS)
13692 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13705 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13707 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
13709 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13711 VkMemoryRequirements vkMemReq = {};
13712 bool requiresDedicatedAllocation =
false;
13713 bool prefersDedicatedAllocation =
false;
13714 allocator->GetImageMemoryRequirements(image, vkMemReq,
13715 requiresDedicatedAllocation, prefersDedicatedAllocation);
13717 VkResult result = allocator->AllocateMemory(
13719 requiresDedicatedAllocation,
13720 prefersDedicatedAllocation,
13724 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
13727 #if VMA_RECORDING_ENABLED 13728 if(allocator->GetRecorder() != VMA_NULL)
13730 allocator->GetRecorder()->RecordAllocateMemoryForImage(
13731 allocator->GetCurrentFrameIndex(),
13733 requiresDedicatedAllocation,
13734 prefersDedicatedAllocation,
13740 if(pAllocationInfo && result == VK_SUCCESS)
13742 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13752 VMA_ASSERT(allocator);
13754 if(allocation == VK_NULL_HANDLE)
13759 VMA_DEBUG_LOG(
"vmaFreeMemory");
13761 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13763 #if VMA_RECORDING_ENABLED 13764 if(allocator->GetRecorder() != VMA_NULL)
13766 allocator->GetRecorder()->RecordFreeMemory(
13767 allocator->GetCurrentFrameIndex(),
13772 allocator->FreeMemory(allocation);
13780 VMA_ASSERT(allocator && allocation && pAllocationInfo);
13782 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13784 #if VMA_RECORDING_ENABLED 13785 if(allocator->GetRecorder() != VMA_NULL)
13787 allocator->GetRecorder()->RecordGetAllocationInfo(
13788 allocator->GetCurrentFrameIndex(),
13793 allocator->GetAllocationInfo(allocation, pAllocationInfo);
13800 VMA_ASSERT(allocator && allocation);
13802 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13804 #if VMA_RECORDING_ENABLED 13805 if(allocator->GetRecorder() != VMA_NULL)
13807 allocator->GetRecorder()->RecordTouchAllocation(
13808 allocator->GetCurrentFrameIndex(),
13813 return allocator->TouchAllocation(allocation);
13821 VMA_ASSERT(allocator && allocation);
13823 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13825 allocation->SetUserData(allocator, pUserData);
13827 #if VMA_RECORDING_ENABLED 13828 if(allocator->GetRecorder() != VMA_NULL)
13830 allocator->GetRecorder()->RecordSetAllocationUserData(
13831 allocator->GetCurrentFrameIndex(),
13842 VMA_ASSERT(allocator && pAllocation);
13844 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
13846 allocator->CreateLostAllocation(pAllocation);
13848 #if VMA_RECORDING_ENABLED 13849 if(allocator->GetRecorder() != VMA_NULL)
13851 allocator->GetRecorder()->RecordCreateLostAllocation(
13852 allocator->GetCurrentFrameIndex(),
13863 VMA_ASSERT(allocator && allocation && ppData);
13865 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13867 VkResult res = allocator->Map(allocation, ppData);
13869 #if VMA_RECORDING_ENABLED 13870 if(allocator->GetRecorder() != VMA_NULL)
13872 allocator->GetRecorder()->RecordMapMemory(
13873 allocator->GetCurrentFrameIndex(),
13885 VMA_ASSERT(allocator && allocation);
13887 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13889 #if VMA_RECORDING_ENABLED 13890 if(allocator->GetRecorder() != VMA_NULL)
13892 allocator->GetRecorder()->RecordUnmapMemory(
13893 allocator->GetCurrentFrameIndex(),
13898 allocator->Unmap(allocation);
13903 VMA_ASSERT(allocator && allocation);
13905 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13907 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13909 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
13911 #if VMA_RECORDING_ENABLED 13912 if(allocator->GetRecorder() != VMA_NULL)
13914 allocator->GetRecorder()->RecordFlushAllocation(
13915 allocator->GetCurrentFrameIndex(),
13916 allocation, offset, size);
13923 VMA_ASSERT(allocator && allocation);
13925 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
13927 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13929 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
13931 #if VMA_RECORDING_ENABLED 13932 if(allocator->GetRecorder() != VMA_NULL)
13934 allocator->GetRecorder()->RecordInvalidateAllocation(
13935 allocator->GetCurrentFrameIndex(),
13936 allocation, offset, size);
13943 VMA_ASSERT(allocator);
13945 VMA_DEBUG_LOG(
"vmaCheckCorruption");
13947 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13949 return allocator->CheckCorruption(memoryTypeBits);
13955 size_t allocationCount,
13956 VkBool32* pAllocationsChanged,
13960 VMA_ASSERT(allocator && pAllocations);
13962 VMA_DEBUG_LOG(
"vmaDefragment");
13964 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13966 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
13974 VMA_ASSERT(allocator && allocation && buffer);
13976 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
13978 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13980 return allocator->BindBufferMemory(allocation, buffer);
13988 VMA_ASSERT(allocator && allocation && image);
13990 VMA_DEBUG_LOG(
"vmaBindImageMemory");
13992 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13994 return allocator->BindImageMemory(allocation, image);
13999 const VkBufferCreateInfo* pBufferCreateInfo,
14005 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14007 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14009 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14011 *pBuffer = VK_NULL_HANDLE;
14012 *pAllocation = VK_NULL_HANDLE;
14015 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14016 allocator->m_hDevice,
14018 allocator->GetAllocationCallbacks(),
14023 VkMemoryRequirements vkMemReq = {};
14024 bool requiresDedicatedAllocation =
false;
14025 bool prefersDedicatedAllocation =
false;
14026 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14027 requiresDedicatedAllocation, prefersDedicatedAllocation);
14031 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14033 VMA_ASSERT(vkMemReq.alignment %
14034 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14036 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14038 VMA_ASSERT(vkMemReq.alignment %
14039 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14041 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14043 VMA_ASSERT(vkMemReq.alignment %
14044 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14048 res = allocator->AllocateMemory(
14050 requiresDedicatedAllocation,
14051 prefersDedicatedAllocation,
14054 *pAllocationCreateInfo,
14055 VMA_SUBALLOCATION_TYPE_BUFFER,
14058 #if VMA_RECORDING_ENABLED 14059 if(allocator->GetRecorder() != VMA_NULL)
14061 allocator->GetRecorder()->RecordCreateBuffer(
14062 allocator->GetCurrentFrameIndex(),
14063 *pBufferCreateInfo,
14064 *pAllocationCreateInfo,
14072 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14076 #if VMA_STATS_STRING_ENABLED 14077 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14079 if(pAllocationInfo != VMA_NULL)
14081 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14086 allocator->FreeMemory(*pAllocation);
14087 *pAllocation = VK_NULL_HANDLE;
14088 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14089 *pBuffer = VK_NULL_HANDLE;
14092 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14093 *pBuffer = VK_NULL_HANDLE;
14104 VMA_ASSERT(allocator);
14106 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14111 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14113 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14115 #if VMA_RECORDING_ENABLED 14116 if(allocator->GetRecorder() != VMA_NULL)
14118 allocator->GetRecorder()->RecordDestroyBuffer(
14119 allocator->GetCurrentFrameIndex(),
14124 if(buffer != VK_NULL_HANDLE)
14126 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14129 if(allocation != VK_NULL_HANDLE)
14131 allocator->FreeMemory(allocation);
14137 const VkImageCreateInfo* pImageCreateInfo,
14143 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14145 VMA_DEBUG_LOG(
"vmaCreateImage");
14147 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14149 *pImage = VK_NULL_HANDLE;
14150 *pAllocation = VK_NULL_HANDLE;
14153 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14154 allocator->m_hDevice,
14156 allocator->GetAllocationCallbacks(),
14160 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14161 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14162 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14165 VkMemoryRequirements vkMemReq = {};
14166 bool requiresDedicatedAllocation =
false;
14167 bool prefersDedicatedAllocation =
false;
14168 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14169 requiresDedicatedAllocation, prefersDedicatedAllocation);
14171 res = allocator->AllocateMemory(
14173 requiresDedicatedAllocation,
14174 prefersDedicatedAllocation,
14177 *pAllocationCreateInfo,
14181 #if VMA_RECORDING_ENABLED 14182 if(allocator->GetRecorder() != VMA_NULL)
14184 allocator->GetRecorder()->RecordCreateImage(
14185 allocator->GetCurrentFrameIndex(),
14187 *pAllocationCreateInfo,
14195 res = allocator->BindImageMemory(*pAllocation, *pImage);
14199 #if VMA_STATS_STRING_ENABLED 14200 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14202 if(pAllocationInfo != VMA_NULL)
14204 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14209 allocator->FreeMemory(*pAllocation);
14210 *pAllocation = VK_NULL_HANDLE;
14211 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14212 *pImage = VK_NULL_HANDLE;
14215 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14216 *pImage = VK_NULL_HANDLE;
14227 VMA_ASSERT(allocator);
14229 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14234 VMA_DEBUG_LOG(
"vmaDestroyImage");
14236 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14238 #if VMA_RECORDING_ENABLED 14239 if(allocator->GetRecorder() != VMA_NULL)
14241 allocator->GetRecorder()->RecordDestroyImage(
14242 allocator->GetCurrentFrameIndex(),
14247 if(image != VK_NULL_HANDLE)
14249 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14251 if(allocation != VK_NULL_HANDLE)
14253 allocator->FreeMemory(allocation);
14257 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1446
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1759
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1515
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1477
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2071
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1458
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1716
Definition: vk_mem_alloc.h:1819
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1450
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2171
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1512
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2416
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1970
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1489
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2052
Definition: vk_mem_alloc.h:1796
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1439
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1869
Definition: vk_mem_alloc.h:1743
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1524
Definition: vk_mem_alloc.h:1988
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1577
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1509
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1747
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1649
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1455
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1648
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2420
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1541
VmaStatInfo total
Definition: vk_mem_alloc.h:1658
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2428
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1853
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2411
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1456
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1381
Represents the main object of this library, initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1518
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2002
Definition: vk_mem_alloc.h:1996
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1584
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2181
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1451
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1475
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1890
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2022
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2058
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1437
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2005
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1694
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2406
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2424
Definition: vk_mem_alloc.h:1733
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1877
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1454
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1654
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1387
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:1837
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1408
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1479
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1413
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2426
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1864
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2068
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1447
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1637
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2017
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1400
Definition: vk_mem_alloc.h:1992
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1803
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1650
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1404
Definition: vk_mem_alloc.h:1827
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2008
Definition: vk_mem_alloc.h:1742
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1453
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1859
Definition: vk_mem_alloc.h:1850
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1640
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1449
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2030
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1527
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2061
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1848
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1883
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1565
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1656
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1783
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1649
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1460
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1497
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1402
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1459
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2044
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1452
Definition: vk_mem_alloc.h:1814
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1505
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2195
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1521
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1649
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1646
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2049
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:1823
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2176
Definition: vk_mem_alloc.h:1834
Definition: vk_mem_alloc.h:1846
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2422
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1445
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1644
Definition: vk_mem_alloc.h:1699
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1998
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1494
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1642
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1457
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1461
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1770
Definition: vk_mem_alloc.h:1841
Definition: vk_mem_alloc.h:1726
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2190
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1435
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1448
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:1985
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2157
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1831
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1952
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1650
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1809
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1469
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1657
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2055
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1650
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2162