23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1359 #include <vulkan/vulkan.h> 1361 #if !defined(VMA_DEDICATED_ALLOCATION) 1362 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1363 #define VMA_DEDICATED_ALLOCATION 1 1365 #define VMA_DEDICATED_ALLOCATION 0 1383 uint32_t memoryType,
1384 VkDeviceMemory memory,
1389 uint32_t memoryType,
1390 VkDeviceMemory memory,
1462 #if VMA_DEDICATED_ALLOCATION 1463 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1464 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1485 #ifndef VMA_RECORDING_ENABLED 1487 #define VMA_RECORDING_ENABLED 1 1489 #define VMA_RECORDING_ENABLED 0 1602 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1610 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1620 uint32_t memoryTypeIndex,
1621 VkMemoryPropertyFlags* pFlags);
1633 uint32_t frameIndex);
1666 #define VMA_STATS_STRING_ENABLED 1 1668 #if VMA_STATS_STRING_ENABLED 1675 char** ppStatsString,
1676 VkBool32 detailedMap);
1680 char* pStatsString);
1682 #endif // #if VMA_STATS_STRING_ENABLED 1881 uint32_t memoryTypeBits,
1883 uint32_t* pMemoryTypeIndex);
1899 const VkBufferCreateInfo* pBufferCreateInfo,
1901 uint32_t* pMemoryTypeIndex);
1917 const VkImageCreateInfo* pImageCreateInfo,
1919 uint32_t* pMemoryTypeIndex);
2072 size_t* pLostAllocationCount);
2171 const VkMemoryRequirements* pVkMemoryRequirements,
2481 size_t allocationCount,
2482 VkBool32* pAllocationsChanged,
2548 const VkBufferCreateInfo* pBufferCreateInfo,
2573 const VkImageCreateInfo* pImageCreateInfo,
2599 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2602 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2603 #define VMA_IMPLEMENTATION 2606 #ifdef VMA_IMPLEMENTATION 2607 #undef VMA_IMPLEMENTATION 2629 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2630 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2642 #if VMA_USE_STL_CONTAINERS 2643 #define VMA_USE_STL_VECTOR 1 2644 #define VMA_USE_STL_UNORDERED_MAP 1 2645 #define VMA_USE_STL_LIST 1 2648 #if VMA_USE_STL_VECTOR 2652 #if VMA_USE_STL_UNORDERED_MAP 2653 #include <unordered_map> 2656 #if VMA_USE_STL_LIST 2665 #include <algorithm> 2671 #define VMA_NULL nullptr 2674 #if defined(__APPLE__) || defined(__ANDROID__) 2676 void *aligned_alloc(
size_t alignment,
size_t size)
2679 if(alignment <
sizeof(
void*))
2681 alignment =
sizeof(
void*);
2685 if(posix_memalign(&pointer, alignment, size) == 0)
2699 #define VMA_ASSERT(expr) assert(expr) 2701 #define VMA_ASSERT(expr) 2707 #ifndef VMA_HEAVY_ASSERT 2709 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2711 #define VMA_HEAVY_ASSERT(expr) 2715 #ifndef VMA_ALIGN_OF 2716 #define VMA_ALIGN_OF(type) (__alignof(type)) 2719 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2721 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2723 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2727 #ifndef VMA_SYSTEM_FREE 2729 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2731 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2736 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2740 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2744 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2748 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2751 #ifndef VMA_DEBUG_LOG 2752 #define VMA_DEBUG_LOG(format, ...) 2762 #if VMA_STATS_STRING_ENABLED 2763 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2765 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2767 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2769 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
// Formats pointer `ptr` into outStr (buffer of strLen bytes) using "%p".
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2783 void Lock() { m_Mutex.lock(); }
2784 void Unlock() { m_Mutex.unlock(); }
2788 #define VMA_MUTEX VmaMutex 2799 #ifndef VMA_ATOMIC_UINT32 2800 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2803 #ifndef VMA_BEST_FIT 2816 #define VMA_BEST_FIT (1) 2819 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2824 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2827 #ifndef VMA_DEBUG_ALIGNMENT 2832 #define VMA_DEBUG_ALIGNMENT (1) 2835 #ifndef VMA_DEBUG_MARGIN 2840 #define VMA_DEBUG_MARGIN (0) 2843 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2848 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2851 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2857 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2860 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2865 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2868 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2873 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2876 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2877 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2881 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2882 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2886 #ifndef VMA_CLASS_NO_COPY 2887 #define VMA_CLASS_NO_COPY(className) \ 2889 className(const className&) = delete; \ 2890 className& operator=(const className&) = delete; 2893 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2896 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
2898 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
2899 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
2905 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2906 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic branch-free SWAR reduction.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);          // pairs of bits
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);    // nibbles
    c = ((c >> 4) + c) & 0x0F0F0F0F;                   // bytes
    c = ((c >> 8) + c) & 0x00FF00FF;                   // 16-bit halves
    c = ((c >> 16) + c) & 0x0000FFFF;                  // final sum
    return c;
}
// Rounds val up to the nearest multiple of align. align must be a positive
// divisor-style alignment (result for val == 0 is 0).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Rounds val down to the nearest multiple of align.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with mathematical rounding to nearest integer.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
2941 static inline bool VmaStrIsEmpty(
const char* pStr)
2943 return pStr == VMA_NULL || *pStr ==
'\0';
// Lomuto-style partition step for VmaQuickSort: uses the last element as the
// pivot, moves all elements for which cmp(elem, pivot) holds in front of it,
// and returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    // Move the pivot into its sorted slot.
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
2971 template<
typename Iterator,
typename Compare>
2972 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2976 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2977 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2978 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2982 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2984 #endif // #ifndef VMA_SORT 2993 static inline bool VmaBlocksOnSamePage(
2994 VkDeviceSize resourceAOffset,
2995 VkDeviceSize resourceASize,
2996 VkDeviceSize resourceBOffset,
2997 VkDeviceSize pageSize)
2999 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3000 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3001 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3002 VkDeviceSize resourceBStart = resourceBOffset;
3003 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3004 return resourceAEndPage == resourceBStartPage;
// Type of the resource occupying a suballocation, ordered so that "more
// linear-like" types have lower values (used by the granularity-conflict check).
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // unknown resource kind
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3024 static inline bool VmaIsBufferImageGranularityConflict(
3025 VmaSuballocationType suballocType1,
3026 VmaSuballocationType suballocType2)
3028 if(suballocType1 > suballocType2)
3030 VMA_SWAP(suballocType1, suballocType2);
3033 switch(suballocType1)
3035 case VMA_SUBALLOCATION_TYPE_FREE:
3037 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3039 case VMA_SUBALLOCATION_TYPE_BUFFER:
3041 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3042 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3043 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3045 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3046 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3047 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3048 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3050 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3051 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3059 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3061 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3062 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3063 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3065 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3069 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3071 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3072 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3073 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3075 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3086 VMA_CLASS_NO_COPY(VmaMutexLock)
3088 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3089 m_pMutex(useMutex ? &mutex : VMA_NULL)
3106 VMA_MUTEX* m_pMutex;
3109 #if VMA_DEBUG_GLOBAL_MUTEX 3110 static VMA_MUTEX gDebugGlobalMutex;
3111 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3113 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3117 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over sorted range [beg, end): returns an iterator to the first
element for which cmp(element, key) is false (i.e. the lower bound of key).
Returns end when every element compares less than key.
Cmp receives (element, key) in that order.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3150 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3152 if((pAllocationCallbacks != VMA_NULL) &&
3153 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3155 return (*pAllocationCallbacks->pfnAllocation)(
3156 pAllocationCallbacks->pUserData,
3159 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3163 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3167 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3169 if((pAllocationCallbacks != VMA_NULL) &&
3170 (pAllocationCallbacks->pfnFree != VMA_NULL))
3172 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3176 VMA_SYSTEM_FREE(ptr);
3180 template<
typename T>
3181 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3183 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3186 template<
typename T>
3187 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3189 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3192 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3194 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3196 template<
typename T>
3197 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3200 VmaFree(pAllocationCallbacks, ptr);
3203 template<
typename T>
3204 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3208 for(
size_t i = count; i--; )
3212 VmaFree(pAllocationCallbacks, ptr);
3217 template<
typename T>
3218 class VmaStlAllocator
3221 const VkAllocationCallbacks*
const m_pCallbacks;
3222 typedef T value_type;
3224 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3225 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3227 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3228 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3230 template<
typename U>
3231 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3233 return m_pCallbacks == rhs.m_pCallbacks;
3235 template<
typename U>
3236 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3238 return m_pCallbacks != rhs.m_pCallbacks;
3241 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3244 #if VMA_USE_STL_VECTOR 3246 #define VmaVector std::vector 3248 template<
typename T,
typename allocatorT>
3249 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3251 vec.insert(vec.begin() + index, item);
3254 template<
typename T,
typename allocatorT>
3255 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3257 vec.erase(vec.begin() + index);
3260 #else // #if VMA_USE_STL_VECTOR 3265 template<
typename T,
typename AllocatorT>
3269 typedef T value_type;
3271 VmaVector(
const AllocatorT& allocator) :
3272 m_Allocator(allocator),
3279 VmaVector(
size_t count,
const AllocatorT& allocator) :
3280 m_Allocator(allocator),
3281 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3287 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3288 m_Allocator(src.m_Allocator),
3289 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3290 m_Count(src.m_Count),
3291 m_Capacity(src.m_Count)
3295 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3301 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3304 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3308 resize(rhs.m_Count);
3311 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3317 bool empty()
const {
return m_Count == 0; }
3318 size_t size()
const {
return m_Count; }
3319 T* data() {
return m_pArray; }
3320 const T* data()
const {
return m_pArray; }
3322 T& operator[](
size_t index)
3324 VMA_HEAVY_ASSERT(index < m_Count);
3325 return m_pArray[index];
3327 const T& operator[](
size_t index)
const 3329 VMA_HEAVY_ASSERT(index < m_Count);
3330 return m_pArray[index];
3335 VMA_HEAVY_ASSERT(m_Count > 0);
3338 const T& front()
const 3340 VMA_HEAVY_ASSERT(m_Count > 0);
3345 VMA_HEAVY_ASSERT(m_Count > 0);
3346 return m_pArray[m_Count - 1];
3348 const T& back()
const 3350 VMA_HEAVY_ASSERT(m_Count > 0);
3351 return m_pArray[m_Count - 1];
3354 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3356 newCapacity = VMA_MAX(newCapacity, m_Count);
3358 if((newCapacity < m_Capacity) && !freeMemory)
3360 newCapacity = m_Capacity;
3363 if(newCapacity != m_Capacity)
3365 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3368 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3370 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3371 m_Capacity = newCapacity;
3372 m_pArray = newArray;
3376 void resize(
size_t newCount,
bool freeMemory =
false)
3378 size_t newCapacity = m_Capacity;
3379 if(newCount > m_Capacity)
3381 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3385 newCapacity = newCount;
3388 if(newCapacity != m_Capacity)
3390 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3391 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3392 if(elementsToCopy != 0)
3394 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3396 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3397 m_Capacity = newCapacity;
3398 m_pArray = newArray;
3404 void clear(
bool freeMemory =
false)
3406 resize(0, freeMemory);
3409 void insert(
size_t index,
const T& src)
3411 VMA_HEAVY_ASSERT(index <= m_Count);
3412 const size_t oldCount = size();
3413 resize(oldCount + 1);
3414 if(index < oldCount)
3416 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3418 m_pArray[index] = src;
3421 void remove(
size_t index)
3423 VMA_HEAVY_ASSERT(index < m_Count);
3424 const size_t oldCount = size();
3425 if(index < oldCount - 1)
3427 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3429 resize(oldCount - 1);
3432 void push_back(
const T& src)
3434 const size_t newIndex = size();
3435 resize(newIndex + 1);
3436 m_pArray[newIndex] = src;
3441 VMA_HEAVY_ASSERT(m_Count > 0);
3445 void push_front(
const T& src)
3452 VMA_HEAVY_ASSERT(m_Count > 0);
3456 typedef T* iterator;
3458 iterator begin() {
return m_pArray; }
3459 iterator end() {
return m_pArray + m_Count; }
3462 AllocatorT m_Allocator;
3468 template<
typename T,
typename allocatorT>
3469 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3471 vec.insert(index, item);
3474 template<
typename T,
typename allocatorT>
3475 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3480 #endif // #if VMA_USE_STL_VECTOR 3482 template<
typename CmpLess,
typename VectorT>
3483 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3485 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3487 vector.data() + vector.size(),
3489 CmpLess()) - vector.data();
3490 VmaVectorInsert(vector, indexToInsert, value);
3491 return indexToInsert;
// Removes the first element equivalent to value from a sorted vector.
// Returns true when an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence under a strict weak ordering: neither compares less.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
3512 template<
typename CmpLess,
typename IterT,
typename KeyT>
3513 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3516 typename IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3517 beg, end, value, comparator);
3519 !comparator(*it, value) && !comparator(value, *it))
3534 template<
typename T>
3535 class VmaPoolAllocator
3537 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3539 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3540 ~VmaPoolAllocator();
3548 uint32_t NextFreeIndex;
3555 uint32_t FirstFreeIndex;
3558 const VkAllocationCallbacks* m_pAllocationCallbacks;
3559 size_t m_ItemsPerBlock;
3560 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3562 ItemBlock& CreateNewBlock();
3565 template<
typename T>
3566 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3567 m_pAllocationCallbacks(pAllocationCallbacks),
3568 m_ItemsPerBlock(itemsPerBlock),
3569 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3571 VMA_ASSERT(itemsPerBlock > 0);
3574 template<
typename T>
3575 VmaPoolAllocator<T>::~VmaPoolAllocator()
3580 template<
typename T>
3581 void VmaPoolAllocator<T>::Clear()
3583 for(
size_t i = m_ItemBlocks.size(); i--; )
3584 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3585 m_ItemBlocks.clear();
3588 template<
typename T>
3589 T* VmaPoolAllocator<T>::Alloc()
3591 for(
size_t i = m_ItemBlocks.size(); i--; )
3593 ItemBlock& block = m_ItemBlocks[i];
3595 if(block.FirstFreeIndex != UINT32_MAX)
3597 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3598 block.FirstFreeIndex = pItem->NextFreeIndex;
3599 return &pItem->Value;
3604 ItemBlock& newBlock = CreateNewBlock();
3605 Item*
const pItem = &newBlock.pItems[0];
3606 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3607 return &pItem->Value;
3610 template<
typename T>
3611 void VmaPoolAllocator<T>::Free(T* ptr)
3614 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3616 ItemBlock& block = m_ItemBlocks[i];
3620 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3623 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3625 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3626 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3627 block.FirstFreeIndex = index;
3631 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3634 template<
typename T>
3635 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3637 ItemBlock newBlock = {
3638 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3640 m_ItemBlocks.push_back(newBlock);
3643 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3644 newBlock.pItems[i].NextFreeIndex = i + 1;
3645 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3646 return m_ItemBlocks.back();
3652 #if VMA_USE_STL_LIST 3654 #define VmaList std::list 3656 #else // #if VMA_USE_STL_LIST 3658 template<
typename T>
3667 template<
typename T>
3670 VMA_CLASS_NO_COPY(VmaRawList)
3672 typedef VmaListItem<T> ItemType;
3674 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3678 size_t GetCount()
const {
return m_Count; }
3679 bool IsEmpty()
const {
return m_Count == 0; }
3681 ItemType* Front() {
return m_pFront; }
3682 const ItemType* Front()
const {
return m_pFront; }
3683 ItemType* Back() {
return m_pBack; }
3684 const ItemType* Back()
const {
return m_pBack; }
3686 ItemType* PushBack();
3687 ItemType* PushFront();
3688 ItemType* PushBack(
const T& value);
3689 ItemType* PushFront(
const T& value);
3694 ItemType* InsertBefore(ItemType* pItem);
3696 ItemType* InsertAfter(ItemType* pItem);
3698 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3699 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3701 void Remove(ItemType* pItem);
3704 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3705 VmaPoolAllocator<ItemType> m_ItemAllocator;
3711 template<
typename T>
3712 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3713 m_pAllocationCallbacks(pAllocationCallbacks),
3714 m_ItemAllocator(pAllocationCallbacks, 128),
3721 template<
typename T>
3722 VmaRawList<T>::~VmaRawList()
3728 template<
typename T>
3729 void VmaRawList<T>::Clear()
3731 if(IsEmpty() ==
false)
3733 ItemType* pItem = m_pBack;
3734 while(pItem != VMA_NULL)
3736 ItemType*
const pPrevItem = pItem->pPrev;
3737 m_ItemAllocator.Free(pItem);
3740 m_pFront = VMA_NULL;
3746 template<
typename T>
3747 VmaListItem<T>* VmaRawList<T>::PushBack()
3749 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3750 pNewItem->pNext = VMA_NULL;
3753 pNewItem->pPrev = VMA_NULL;
3754 m_pFront = pNewItem;
3760 pNewItem->pPrev = m_pBack;
3761 m_pBack->pNext = pNewItem;
3768 template<
typename T>
3769 VmaListItem<T>* VmaRawList<T>::PushFront()
3771 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3772 pNewItem->pPrev = VMA_NULL;
3775 pNewItem->pNext = VMA_NULL;
3776 m_pFront = pNewItem;
3782 pNewItem->pNext = m_pFront;
3783 m_pFront->pPrev = pNewItem;
3784 m_pFront = pNewItem;
3790 template<
typename T>
3791 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3793 ItemType*
const pNewItem = PushBack();
3794 pNewItem->Value = value;
3798 template<
typename T>
3799 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3801 ItemType*
const pNewItem = PushFront();
3802 pNewItem->Value = value;
3806 template<
typename T>
3807 void VmaRawList<T>::PopBack()
3809 VMA_HEAVY_ASSERT(m_Count > 0);
3810 ItemType*
const pBackItem = m_pBack;
3811 ItemType*
const pPrevItem = pBackItem->pPrev;
3812 if(pPrevItem != VMA_NULL)
3814 pPrevItem->pNext = VMA_NULL;
3816 m_pBack = pPrevItem;
3817 m_ItemAllocator.Free(pBackItem);
3821 template<
typename T>
3822 void VmaRawList<T>::PopFront()
3824 VMA_HEAVY_ASSERT(m_Count > 0);
3825 ItemType*
const pFrontItem = m_pFront;
3826 ItemType*
const pNextItem = pFrontItem->pNext;
3827 if(pNextItem != VMA_NULL)
3829 pNextItem->pPrev = VMA_NULL;
3831 m_pFront = pNextItem;
3832 m_ItemAllocator.Free(pFrontItem);
3836 template<
typename T>
3837 void VmaRawList<T>::Remove(ItemType* pItem)
3839 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3840 VMA_HEAVY_ASSERT(m_Count > 0);
3842 if(pItem->pPrev != VMA_NULL)
3844 pItem->pPrev->pNext = pItem->pNext;
3848 VMA_HEAVY_ASSERT(m_pFront == pItem);
3849 m_pFront = pItem->pNext;
3852 if(pItem->pNext != VMA_NULL)
3854 pItem->pNext->pPrev = pItem->pPrev;
3858 VMA_HEAVY_ASSERT(m_pBack == pItem);
3859 m_pBack = pItem->pPrev;
3862 m_ItemAllocator.Free(pItem);
3866 template<
typename T>
3867 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3869 if(pItem != VMA_NULL)
3871 ItemType*
const prevItem = pItem->pPrev;
3872 ItemType*
const newItem = m_ItemAllocator.Alloc();
3873 newItem->pPrev = prevItem;
3874 newItem->pNext = pItem;
3875 pItem->pPrev = newItem;
3876 if(prevItem != VMA_NULL)
3878 prevItem->pNext = newItem;
3882 VMA_HEAVY_ASSERT(m_pFront == pItem);
3892 template<
typename T>
3893 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3895 if(pItem != VMA_NULL)
3897 ItemType*
const nextItem = pItem->pNext;
3898 ItemType*
const newItem = m_ItemAllocator.Alloc();
3899 newItem->pNext = nextItem;
3900 newItem->pPrev = pItem;
3901 pItem->pNext = newItem;
3902 if(nextItem != VMA_NULL)
3904 nextItem->pPrev = newItem;
3908 VMA_HEAVY_ASSERT(m_pBack == pItem);
3918 template<
typename T>
3919 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3921 ItemType*
const newItem = InsertBefore(pItem);
3922 newItem->Value = value;
3926 template<
typename T>
3927 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3929 ItemType*
const newItem = InsertAfter(pItem);
3930 newItem->Value = value;
3934 template<
typename T,
typename AllocatorT>
3937 VMA_CLASS_NO_COPY(VmaList)
3948 T& operator*()
const 3950 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3951 return m_pItem->Value;
3953 T* operator->()
const 3955 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3956 return &m_pItem->Value;
3959 iterator& operator++()
3961 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3962 m_pItem = m_pItem->pNext;
3965 iterator& operator--()
3967 if(m_pItem != VMA_NULL)
3969 m_pItem = m_pItem->pPrev;
3973 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3974 m_pItem = m_pList->Back();
3979 iterator operator++(
int)
3981 iterator result = *
this;
3985 iterator operator--(
int)
3987 iterator result = *
this;
3992 bool operator==(
const iterator& rhs)
const 3994 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3995 return m_pItem == rhs.m_pItem;
3997 bool operator!=(
const iterator& rhs)
const 3999 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4000 return m_pItem != rhs.m_pItem;
4004 VmaRawList<T>* m_pList;
4005 VmaListItem<T>* m_pItem;
4007 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4013 friend class VmaList<T, AllocatorT>;
4016 class const_iterator
4025 const_iterator(
const iterator& src) :
4026 m_pList(src.m_pList),
4027 m_pItem(src.m_pItem)
4031 const T& operator*()
const 4033 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4034 return m_pItem->Value;
4036 const T* operator->()
const 4038 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4039 return &m_pItem->Value;
4042 const_iterator& operator++()
4044 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4045 m_pItem = m_pItem->pNext;
4048 const_iterator& operator--()
4050 if(m_pItem != VMA_NULL)
4052 m_pItem = m_pItem->pPrev;
4056 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4057 m_pItem = m_pList->Back();
4062 const_iterator operator++(
int)
4064 const_iterator result = *
this;
4068 const_iterator operator--(
int)
4070 const_iterator result = *
this;
4075 bool operator==(
const const_iterator& rhs)
const 4077 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4078 return m_pItem == rhs.m_pItem;
4080 bool operator!=(
const const_iterator& rhs)
const 4082 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4083 return m_pItem != rhs.m_pItem;
4087 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4093 const VmaRawList<T>* m_pList;
4094 const VmaListItem<T>* m_pItem;
4096 friend class VmaList<T, AllocatorT>;
4099 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4101 bool empty()
const {
return m_RawList.IsEmpty(); }
4102 size_t size()
const {
return m_RawList.GetCount(); }
4104 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4105 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4107 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4108 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4110 void clear() { m_RawList.Clear(); }
4111 void push_back(
const T& value) { m_RawList.PushBack(value); }
4112 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4113 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4116 VmaRawList<T> m_RawList;
4119 #endif // #if VMA_USE_STL_LIST 4127 #if VMA_USE_STL_UNORDERED_MAP 4129 #define VmaPair std::pair 4131 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4132 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4134 #else // #if VMA_USE_STL_UNORDERED_MAP 4136 template<
typename T1,
typename T2>
4142 VmaPair() : first(), second() { }
4143 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4149 template<
typename KeyT,
typename ValueT>
4153 typedef VmaPair<KeyT, ValueT> PairType;
4154 typedef PairType* iterator;
4156 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4158 iterator begin() {
return m_Vector.begin(); }
4159 iterator end() {
return m_Vector.end(); }
4161 void insert(
const PairType& pair);
4162 iterator find(
const KeyT& key);
4163 void erase(iterator it);
4166 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4169 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4171 template<
typename FirstT,
typename SecondT>
4172 struct VmaPairFirstLess
4174 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4176 return lhs.first < rhs.first;
4178 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4180 return lhs.first < rhsFirst;
4184 template<
typename KeyT,
typename ValueT>
4185 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4187 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4189 m_Vector.data() + m_Vector.size(),
4191 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4192 VmaVectorInsert(m_Vector, indexToInsert, pair);
4195 template<
typename KeyT,
typename ValueT>
4196 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4198 PairType* it = VmaBinaryFindFirstNotLess(
4200 m_Vector.data() + m_Vector.size(),
4202 VmaPairFirstLess<KeyT, ValueT>());
4203 if((it != m_Vector.end()) && (it->first == key))
4209 return m_Vector.end();
4213 template<
typename KeyT,
typename ValueT>
4214 void VmaMap<KeyT, ValueT>::erase(iterator it)
4216 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4219 #endif // #if VMA_USE_STL_UNORDERED_MAP 4225 class VmaDeviceMemoryBlock;
4227 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4229 struct VmaAllocation_T
4231 VMA_CLASS_NO_COPY(VmaAllocation_T)
4233 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4237 FLAG_USER_DATA_STRING = 0x01,
4241 enum ALLOCATION_TYPE
4243 ALLOCATION_TYPE_NONE,
4244 ALLOCATION_TYPE_BLOCK,
4245 ALLOCATION_TYPE_DEDICATED,
4248 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4251 m_pUserData(VMA_NULL),
4252 m_LastUseFrameIndex(currentFrameIndex),
4253 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4254 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4256 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4258 #if VMA_STATS_STRING_ENABLED 4259 m_CreationFrameIndex = currentFrameIndex;
4260 m_BufferImageUsage = 0;
4266 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4269 VMA_ASSERT(m_pUserData == VMA_NULL);
4272 void InitBlockAllocation(
4274 VmaDeviceMemoryBlock* block,
4275 VkDeviceSize offset,
4276 VkDeviceSize alignment,
4278 VmaSuballocationType suballocationType,
4282 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4283 VMA_ASSERT(block != VMA_NULL);
4284 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4285 m_Alignment = alignment;
4287 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4288 m_SuballocationType = (uint8_t)suballocationType;
4289 m_BlockAllocation.m_hPool = hPool;
4290 m_BlockAllocation.m_Block = block;
4291 m_BlockAllocation.m_Offset = offset;
4292 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4297 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4298 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4299 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4300 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4301 m_BlockAllocation.m_Block = VMA_NULL;
4302 m_BlockAllocation.m_Offset = 0;
4303 m_BlockAllocation.m_CanBecomeLost =
true;
4306 void ChangeBlockAllocation(
4308 VmaDeviceMemoryBlock* block,
4309 VkDeviceSize offset);
4312 void InitDedicatedAllocation(
4313 uint32_t memoryTypeIndex,
4314 VkDeviceMemory hMemory,
4315 VmaSuballocationType suballocationType,
4319 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4320 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4321 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4324 m_SuballocationType = (uint8_t)suballocationType;
4325 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4326 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4327 m_DedicatedAllocation.m_hMemory = hMemory;
4328 m_DedicatedAllocation.m_pMappedData = pMappedData;
4331 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4332 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4333 VkDeviceSize GetSize()
const {
return m_Size; }
4334 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4335 void* GetUserData()
const {
return m_pUserData; }
4336 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4337 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4339 VmaDeviceMemoryBlock* GetBlock()
const 4341 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4342 return m_BlockAllocation.m_Block;
4344 VkDeviceSize GetOffset()
const;
4345 VkDeviceMemory GetMemory()
const;
4346 uint32_t GetMemoryTypeIndex()
const;
4347 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4348 void* GetMappedData()
const;
4349 bool CanBecomeLost()
const;
4352 uint32_t GetLastUseFrameIndex()
const 4354 return m_LastUseFrameIndex.load();
4356 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4358 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4368 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4370 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4372 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4383 void BlockAllocMap();
4384 void BlockAllocUnmap();
4385 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4388 #if VMA_STATS_STRING_ENABLED 4389 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4390 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4392 void InitBufferImageUsage(uint32_t bufferImageUsage)
4394 VMA_ASSERT(m_BufferImageUsage == 0);
4395 m_BufferImageUsage = bufferImageUsage;
4398 void PrintParameters(
class VmaJsonWriter& json)
const;
4402 VkDeviceSize m_Alignment;
4403 VkDeviceSize m_Size;
4405 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4407 uint8_t m_SuballocationType;
4414 struct BlockAllocation
4417 VmaDeviceMemoryBlock* m_Block;
4418 VkDeviceSize m_Offset;
4419 bool m_CanBecomeLost;
4423 struct DedicatedAllocation
4425 uint32_t m_MemoryTypeIndex;
4426 VkDeviceMemory m_hMemory;
4427 void* m_pMappedData;
4433 BlockAllocation m_BlockAllocation;
4435 DedicatedAllocation m_DedicatedAllocation;
4438 #if VMA_STATS_STRING_ENABLED 4439 uint32_t m_CreationFrameIndex;
4440 uint32_t m_BufferImageUsage;
4450 struct VmaSuballocation
4452 VkDeviceSize offset;
4455 VmaSuballocationType type;
4459 struct VmaSuballocationOffsetLess
4461 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4463 return lhs.offset < rhs.offset;
4466 struct VmaSuballocationOffsetGreater
4468 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4470 return lhs.offset > rhs.offset;
4474 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4477 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4492 struct VmaAllocationRequest
4494 VkDeviceSize offset;
4495 VkDeviceSize sumFreeSize;
4496 VkDeviceSize sumItemSize;
4497 VmaSuballocationList::iterator item;
4498 size_t itemsToMakeLostCount;
4500 VkDeviceSize CalcCost()
const 4502 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4510 class VmaBlockMetadata
4513 VmaBlockMetadata() : m_Size(0) { }
4514 virtual ~VmaBlockMetadata() { }
4515 virtual void Init(VkDeviceSize size) { m_Size = size; }
4518 virtual bool Validate()
const = 0;
4519 VkDeviceSize GetSize()
const {
return m_Size; }
4520 virtual size_t GetAllocationCount()
const = 0;
4521 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4522 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4524 virtual bool IsEmpty()
const = 0;
4526 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4528 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4530 #if VMA_STATS_STRING_ENABLED 4531 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4537 virtual bool CreateAllocationRequest(
4538 uint32_t currentFrameIndex,
4539 uint32_t frameInUseCount,
4540 VkDeviceSize bufferImageGranularity,
4541 VkDeviceSize allocSize,
4542 VkDeviceSize allocAlignment,
4544 VmaSuballocationType allocType,
4545 bool canMakeOtherLost,
4546 VmaAllocationRequest* pAllocationRequest) = 0;
4548 virtual bool MakeRequestedAllocationsLost(
4549 uint32_t currentFrameIndex,
4550 uint32_t frameInUseCount,
4551 VmaAllocationRequest* pAllocationRequest) = 0;
4553 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4555 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4559 const VmaAllocationRequest& request,
4560 VmaSuballocationType type,
4561 VkDeviceSize allocSize,
4567 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4570 #if VMA_STATS_STRING_ENABLED 4571 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4572 VkDeviceSize unusedBytes,
4573 size_t allocationCount,
4574 size_t unusedRangeCount)
const;
4575 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4576 VkDeviceSize offset,
4578 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4579 VkDeviceSize offset,
4580 VkDeviceSize size)
const;
4581 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4585 VkDeviceSize m_Size;
4588 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4590 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4593 virtual ~VmaBlockMetadata_Generic();
4594 virtual void Init(VkDeviceSize size);
4596 virtual bool Validate()
const;
4597 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4598 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4599 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4600 virtual bool IsEmpty()
const;
4602 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4603 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4605 #if VMA_STATS_STRING_ENABLED 4606 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4609 virtual bool CreateAllocationRequest(
4610 uint32_t currentFrameIndex,
4611 uint32_t frameInUseCount,
4612 VkDeviceSize bufferImageGranularity,
4613 VkDeviceSize allocSize,
4614 VkDeviceSize allocAlignment,
4616 VmaSuballocationType allocType,
4617 bool canMakeOtherLost,
4618 VmaAllocationRequest* pAllocationRequest);
4620 virtual bool MakeRequestedAllocationsLost(
4621 uint32_t currentFrameIndex,
4622 uint32_t frameInUseCount,
4623 VmaAllocationRequest* pAllocationRequest);
4625 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4627 virtual VkResult CheckCorruption(
const void* pBlockData);
4630 const VmaAllocationRequest& request,
4631 VmaSuballocationType type,
4632 VkDeviceSize allocSize,
4637 virtual void FreeAtOffset(VkDeviceSize offset);
4640 uint32_t m_FreeCount;
4641 VkDeviceSize m_SumFreeSize;
4642 VmaSuballocationList m_Suballocations;
4645 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4647 bool ValidateFreeSuballocationList()
const;
4651 bool CheckAllocation(
4652 uint32_t currentFrameIndex,
4653 uint32_t frameInUseCount,
4654 VkDeviceSize bufferImageGranularity,
4655 VkDeviceSize allocSize,
4656 VkDeviceSize allocAlignment,
4657 VmaSuballocationType allocType,
4658 VmaSuballocationList::const_iterator suballocItem,
4659 bool canMakeOtherLost,
4660 VkDeviceSize* pOffset,
4661 size_t* itemsToMakeLostCount,
4662 VkDeviceSize* pSumFreeSize,
4663 VkDeviceSize* pSumItemSize)
const;
4665 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4669 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4672 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4675 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4756 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4758 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4761 virtual ~VmaBlockMetadata_Linear();
4762 virtual void Init(VkDeviceSize size);
4764 virtual bool Validate()
const;
4765 virtual size_t GetAllocationCount()
const;
4766 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4767 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4768 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4770 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4771 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4773 #if VMA_STATS_STRING_ENABLED 4774 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4777 virtual bool CreateAllocationRequest(
4778 uint32_t currentFrameIndex,
4779 uint32_t frameInUseCount,
4780 VkDeviceSize bufferImageGranularity,
4781 VkDeviceSize allocSize,
4782 VkDeviceSize allocAlignment,
4784 VmaSuballocationType allocType,
4785 bool canMakeOtherLost,
4786 VmaAllocationRequest* pAllocationRequest);
4788 virtual bool MakeRequestedAllocationsLost(
4789 uint32_t currentFrameIndex,
4790 uint32_t frameInUseCount,
4791 VmaAllocationRequest* pAllocationRequest);
4793 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4795 virtual VkResult CheckCorruption(
const void* pBlockData);
4798 const VmaAllocationRequest& request,
4799 VmaSuballocationType type,
4800 VkDeviceSize allocSize,
4805 virtual void FreeAtOffset(VkDeviceSize offset);
4815 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
4817 enum SECOND_VECTOR_MODE
4819 SECOND_VECTOR_EMPTY,
4824 SECOND_VECTOR_RING_BUFFER,
4830 SECOND_VECTOR_DOUBLE_STACK,
4833 VkDeviceSize m_SumFreeSize;
4834 SuballocationVectorType m_Suballocations0, m_Suballocations1;
4835 uint32_t m_1stVectorIndex;
4836 SECOND_VECTOR_MODE m_2ndVectorMode;
4838 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4839 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
4840 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
4841 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
4844 size_t m_1stNullItemsBeginCount;
4846 size_t m_1stNullItemsMiddleCount;
4848 size_t m_2ndNullItemsCount;
4850 bool ShouldCompact1st()
const;
4851 void CleanupAfterFree();
4860 class VmaDeviceMemoryBlock
4862 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4864 VmaBlockMetadata* m_pMetadata;
4868 ~VmaDeviceMemoryBlock()
4870 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4871 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4877 uint32_t newMemoryTypeIndex,
4878 VkDeviceMemory newMemory,
4879 VkDeviceSize newSize,
4881 bool linearAlgorithm);
4885 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4886 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4887 uint32_t GetId()
const {
return m_Id; }
4888 void* GetMappedData()
const {
return m_pMappedData; }
4891 bool Validate()
const;
4896 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4899 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4900 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4902 VkResult BindBufferMemory(
4906 VkResult BindImageMemory(
4912 uint32_t m_MemoryTypeIndex;
4914 VkDeviceMemory m_hMemory;
4919 uint32_t m_MapCount;
4920 void* m_pMappedData;
// Orders raw pointers by address; used as a comparator for sorted containers
// of block/pool pointers.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
4939 struct VmaBlockVector
4941 VMA_CLASS_NO_COPY(VmaBlockVector)
4945 uint32_t memoryTypeIndex,
4946 VkDeviceSize preferredBlockSize,
4947 size_t minBlockCount,
4948 size_t maxBlockCount,
4949 VkDeviceSize bufferImageGranularity,
4950 uint32_t frameInUseCount,
4952 bool explicitBlockSize,
4953 bool linearAlgorithm);
4956 VkResult CreateMinBlocks();
4958 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4959 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4960 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4961 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4962 bool UsesLinearAlgorithm()
const {
return m_LinearAlgorithm; }
4966 bool IsEmpty()
const {
return m_Blocks.empty(); }
4967 bool IsCorruptionDetectionEnabled()
const;
4971 uint32_t currentFrameIndex,
4973 VkDeviceSize alignment,
4975 VmaSuballocationType suballocType,
4984 #if VMA_STATS_STRING_ENABLED 4985 void PrintDetailedMap(
class VmaJsonWriter& json);
4988 void MakePoolAllocationsLost(
4989 uint32_t currentFrameIndex,
4990 size_t* pLostAllocationCount);
4991 VkResult CheckCorruption();
4993 VmaDefragmentator* EnsureDefragmentator(
4995 uint32_t currentFrameIndex);
4997 VkResult Defragment(
4999 VkDeviceSize& maxBytesToMove,
5000 uint32_t& maxAllocationsToMove);
5002 void DestroyDefragmentator();
5005 friend class VmaDefragmentator;
5008 const uint32_t m_MemoryTypeIndex;
5009 const VkDeviceSize m_PreferredBlockSize;
5010 const size_t m_MinBlockCount;
5011 const size_t m_MaxBlockCount;
5012 const VkDeviceSize m_BufferImageGranularity;
5013 const uint32_t m_FrameInUseCount;
5014 const bool m_IsCustomPool;
5015 const bool m_ExplicitBlockSize;
5016 const bool m_LinearAlgorithm;
5017 bool m_HasEmptyBlock;
5020 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5024 VmaDefragmentator* m_pDefragmentator;
5025 uint32_t m_NextBlockId;
5027 VkDeviceSize CalcMaxBlockSize()
const;
5030 void Remove(VmaDeviceMemoryBlock* pBlock);
5034 void IncrementallySortBlocks();
5037 VkResult AllocateFromBlock(
5038 VmaDeviceMemoryBlock* pBlock,
5040 uint32_t currentFrameIndex,
5042 VkDeviceSize alignment,
5045 VmaSuballocationType suballocType,
5048 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5053 VMA_CLASS_NO_COPY(VmaPool_T)
5055 VmaBlockVector m_BlockVector;
5060 VkDeviceSize preferredBlockSize);
5063 uint32_t GetId()
const {
return m_Id; }
5064 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5066 #if VMA_STATS_STRING_ENABLED 5074 class VmaDefragmentator
5076 VMA_CLASS_NO_COPY(VmaDefragmentator)
5079 VmaBlockVector*
const m_pBlockVector;
5080 uint32_t m_CurrentFrameIndex;
5081 VkDeviceSize m_BytesMoved;
5082 uint32_t m_AllocationsMoved;
5084 struct AllocationInfo
5087 VkBool32* m_pChanged;
5090 m_hAllocation(VK_NULL_HANDLE),
5091 m_pChanged(VMA_NULL)
5096 struct AllocationInfoSizeGreater
5098 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5100 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5105 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5109 VmaDeviceMemoryBlock* m_pBlock;
5110 bool m_HasNonMovableAllocations;
5111 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5113 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5115 m_HasNonMovableAllocations(true),
5116 m_Allocations(pAllocationCallbacks),
5117 m_pMappedDataForDefragmentation(VMA_NULL)
5121 void CalcHasNonMovableAllocations()
5123 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5124 const size_t defragmentAllocCount = m_Allocations.size();
5125 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5128 void SortAllocationsBySizeDescecnding()
5130 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5133 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5138 void* m_pMappedDataForDefragmentation;
5141 struct BlockPointerLess
5143 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5145 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5147 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5149 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5155 struct BlockInfoCompareMoveDestination
5157 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5159 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5163 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5167 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5175 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5176 BlockInfoVector m_Blocks;
5178 VkResult DefragmentRound(
5179 VkDeviceSize maxBytesToMove,
5180 uint32_t maxAllocationsToMove);
5182 static bool MoveMakesSense(
5183 size_t dstBlockIndex, VkDeviceSize dstOffset,
5184 size_t srcBlockIndex, VkDeviceSize srcOffset);
5189 VmaBlockVector* pBlockVector,
5190 uint32_t currentFrameIndex);
5192 ~VmaDefragmentator();
5194 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5195 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5197 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5199 VkResult Defragment(
5200 VkDeviceSize maxBytesToMove,
5201 uint32_t maxAllocationsToMove);
5204 #if VMA_RECORDING_ENABLED 5211 void WriteConfiguration(
5212 const VkPhysicalDeviceProperties& devProps,
5213 const VkPhysicalDeviceMemoryProperties& memProps,
5214 bool dedicatedAllocationExtensionEnabled);
5217 void RecordCreateAllocator(uint32_t frameIndex);
5218 void RecordDestroyAllocator(uint32_t frameIndex);
5219 void RecordCreatePool(uint32_t frameIndex,
5222 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5223 void RecordAllocateMemory(uint32_t frameIndex,
5224 const VkMemoryRequirements& vkMemReq,
5227 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5228 const VkMemoryRequirements& vkMemReq,
5229 bool requiresDedicatedAllocation,
5230 bool prefersDedicatedAllocation,
5233 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5234 const VkMemoryRequirements& vkMemReq,
5235 bool requiresDedicatedAllocation,
5236 bool prefersDedicatedAllocation,
5239 void RecordFreeMemory(uint32_t frameIndex,
5241 void RecordSetAllocationUserData(uint32_t frameIndex,
5243 const void* pUserData);
5244 void RecordCreateLostAllocation(uint32_t frameIndex,
5246 void RecordMapMemory(uint32_t frameIndex,
5248 void RecordUnmapMemory(uint32_t frameIndex,
5250 void RecordFlushAllocation(uint32_t frameIndex,
5251 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5252 void RecordInvalidateAllocation(uint32_t frameIndex,
5253 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5254 void RecordCreateBuffer(uint32_t frameIndex,
5255 const VkBufferCreateInfo& bufCreateInfo,
5258 void RecordCreateImage(uint32_t frameIndex,
5259 const VkImageCreateInfo& imageCreateInfo,
5262 void RecordDestroyBuffer(uint32_t frameIndex,
5264 void RecordDestroyImage(uint32_t frameIndex,
5266 void RecordTouchAllocation(uint32_t frameIndex,
5268 void RecordGetAllocationInfo(uint32_t frameIndex,
5270 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5280 class UserDataString
5284 const char* GetString()
const {
return m_Str; }
5294 VMA_MUTEX m_FileMutex;
5296 int64_t m_StartCounter;
5298 void GetBasicParams(CallParams& outParams);
5302 #endif // #if VMA_RECORDING_ENABLED 5305 struct VmaAllocator_T
5307 VMA_CLASS_NO_COPY(VmaAllocator_T)
5310 bool m_UseKhrDedicatedAllocation;
5312 bool m_AllocationCallbacksSpecified;
5313 VkAllocationCallbacks m_AllocationCallbacks;
5317 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5318 VMA_MUTEX m_HeapSizeLimitMutex;
5320 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5321 VkPhysicalDeviceMemoryProperties m_MemProps;
5324 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5327 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5328 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5329 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5335 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5337 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5341 return m_VulkanFunctions;
5344 VkDeviceSize GetBufferImageGranularity()
const 5347 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5348 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5351 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5352 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5354 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5356 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5357 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5360 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5362 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5363 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5366 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5368 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5369 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5370 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5373 bool IsIntegratedGpu()
const 5375 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5378 #if VMA_RECORDING_ENABLED 5379 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5382 void GetBufferMemoryRequirements(
5384 VkMemoryRequirements& memReq,
5385 bool& requiresDedicatedAllocation,
5386 bool& prefersDedicatedAllocation)
const;
5387 void GetImageMemoryRequirements(
5389 VkMemoryRequirements& memReq,
5390 bool& requiresDedicatedAllocation,
5391 bool& prefersDedicatedAllocation)
const;
5394 VkResult AllocateMemory(
5395 const VkMemoryRequirements& vkMemReq,
5396 bool requiresDedicatedAllocation,
5397 bool prefersDedicatedAllocation,
5398 VkBuffer dedicatedBuffer,
5399 VkImage dedicatedImage,
5401 VmaSuballocationType suballocType,
5407 void CalculateStats(
VmaStats* pStats);
5409 #if VMA_STATS_STRING_ENABLED 5410 void PrintDetailedMap(
class VmaJsonWriter& json);
5413 VkResult Defragment(
5415 size_t allocationCount,
5416 VkBool32* pAllocationsChanged,
5424 void DestroyPool(
VmaPool pool);
5427 void SetCurrentFrameIndex(uint32_t frameIndex);
5428 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5430 void MakePoolAllocationsLost(
5432 size_t* pLostAllocationCount);
5433 VkResult CheckPoolCorruption(
VmaPool hPool);
5434 VkResult CheckCorruption(uint32_t memoryTypeBits);
5438 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5439 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5444 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5445 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5447 void FlushOrInvalidateAllocation(
5449 VkDeviceSize offset, VkDeviceSize size,
5450 VMA_CACHE_OPERATION op);
5452 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5455 VkDeviceSize m_PreferredLargeHeapBlockSize;
5457 VkPhysicalDevice m_PhysicalDevice;
5458 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5460 VMA_MUTEX m_PoolsMutex;
5462 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5463 uint32_t m_NextPoolId;
5467 #if VMA_RECORDING_ENABLED 5468 VmaRecorder* m_pRecorder;
5473 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5475 VkResult AllocateMemoryOfType(
5477 VkDeviceSize alignment,
5478 bool dedicatedAllocation,
5479 VkBuffer dedicatedBuffer,
5480 VkImage dedicatedImage,
5482 uint32_t memTypeIndex,
5483 VmaSuballocationType suballocType,
5487 VkResult AllocateDedicatedMemory(
5489 VmaSuballocationType suballocType,
5490 uint32_t memTypeIndex,
5492 bool isUserDataString,
5494 VkBuffer dedicatedBuffer,
5495 VkImage dedicatedImage,
5505 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5507 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5510 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5512 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5515 template<
typename T>
5518 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5521 template<
typename T>
5522 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5524 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5527 template<
typename T>
5528 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5533 VmaFree(hAllocator, ptr);
5537 template<
typename T>
5538 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5542 for(
size_t i = count; i--; )
5544 VmaFree(hAllocator, ptr);
5551 #if VMA_STATS_STRING_ENABLED 5553 class VmaStringBuilder
5556 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5557 size_t GetLength()
const {
return m_Data.size(); }
5558 const char* GetData()
const {
return m_Data.data(); }
5560 void Add(
char ch) { m_Data.push_back(ch); }
5561 void Add(
const char* pStr);
5562 void AddNewLine() { Add(
'\n'); }
5563 void AddNumber(uint32_t num);
5564 void AddNumber(uint64_t num);
5565 void AddPointer(
const void* ptr);
5568 VmaVector< char, VmaStlAllocator<char> > m_Data;
5571 void VmaStringBuilder::Add(
const char* pStr)
5573 const size_t strLen = strlen(pStr);
5576 const size_t oldCount = m_Data.size();
5577 m_Data.resize(oldCount + strLen);
5578 memcpy(m_Data.data() + oldCount, pStr, strLen);
5582 void VmaStringBuilder::AddNumber(uint32_t num)
5585 VmaUint32ToStr(buf,
sizeof(buf), num);
5589 void VmaStringBuilder::AddNumber(uint64_t num)
5592 VmaUint64ToStr(buf,
sizeof(buf), num);
5596 void VmaStringBuilder::AddPointer(
const void* ptr)
5599 VmaPtrToStr(buf,
sizeof(buf), ptr);
5603 #endif // #if VMA_STATS_STRING_ENABLED 5608 #if VMA_STATS_STRING_ENABLED 5612 VMA_CLASS_NO_COPY(VmaJsonWriter)
5614 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5617 void BeginObject(
bool singleLine =
false);
5620 void BeginArray(
bool singleLine =
false);
5623 void WriteString(
const char* pStr);
5624 void BeginString(
const char* pStr = VMA_NULL);
5625 void ContinueString(
const char* pStr);
5626 void ContinueString(uint32_t n);
5627 void ContinueString(uint64_t n);
5628 void ContinueString_Pointer(
const void* ptr);
5629 void EndString(
const char* pStr = VMA_NULL);
5631 void WriteNumber(uint32_t n);
5632 void WriteNumber(uint64_t n);
5633 void WriteBool(
bool b);
5637 static const char*
const INDENT;
5639 enum COLLECTION_TYPE
5641 COLLECTION_TYPE_OBJECT,
5642 COLLECTION_TYPE_ARRAY,
5646 COLLECTION_TYPE type;
5647 uint32_t valueCount;
5648 bool singleLineMode;
5651 VmaStringBuilder& m_SB;
5652 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5653 bool m_InsideString;
5655 void BeginValue(
bool isString);
5656 void WriteIndent(
bool oneLess =
false);
5659 const char*
const VmaJsonWriter::INDENT =
" ";
5661 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5663 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5664 m_InsideString(false)
5668 VmaJsonWriter::~VmaJsonWriter()
5670 VMA_ASSERT(!m_InsideString);
5671 VMA_ASSERT(m_Stack.empty());
5674 void VmaJsonWriter::BeginObject(
bool singleLine)
5676 VMA_ASSERT(!m_InsideString);
5682 item.type = COLLECTION_TYPE_OBJECT;
5683 item.valueCount = 0;
5684 item.singleLineMode = singleLine;
5685 m_Stack.push_back(item);
// NOTE(review): this region is a mangled extraction of the VmaJsonWriter method
// implementations; the embedded original line numbers (e.g. 5690 -> 5695) show
// that braces and several statements were dropped. Comments below describe only
// what the visible code demonstrates.

// Closes the current JSON object. Must not be called while a string value is
// open, and the top of the collection stack must be an object.
5688 void VmaJsonWriter::EndObject()
5690 VMA_ASSERT(!m_InsideString);
5695 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array; singleLine controls the indentation mode recorded on the
// stack item (no newlines between elements when true).
5699 void VmaJsonWriter::BeginArray(
bool singleLine)
5701 VMA_ASSERT(!m_InsideString);
5707 item.type = COLLECTION_TYPE_ARRAY;
5708 item.valueCount = 0;
5709 item.singleLineMode = singleLine;
5710 m_Stack.push_back(item);
// Closes the current JSON array; top of stack must be an array.
5713 void VmaJsonWriter::EndArray()
5715 VMA_ASSERT(!m_InsideString);
5720 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Writes a complete quoted string value (body elided by extraction; presumably
// BeginString + EndString — TODO confirm against upstream source).
5724 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a quoted string value; optional pStr is appended immediately.
5730 void VmaJsonWriter::BeginString(
const char* pStr)
5732 VMA_ASSERT(!m_InsideString);
5736 m_InsideString =
true;
5737 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5739 ContinueString(pStr);
// Appends characters to the currently open string, escaping as needed. The
// per-character switch (orig. lines 5749-5780) was dropped by the extraction;
// only the fallback assert for unsupported characters is visible.
5743 void VmaJsonWriter::ContinueString(
const char* pStr)
5745 VMA_ASSERT(m_InsideString);
5747 const size_t strLen = strlen(pStr);
5748 for(
size_t i = 0; i < strLen; ++i)
5781 VMA_ASSERT(0 &&
"Character not currently supported.");
// Appends a 32-bit number to the currently open string.
5787 void VmaJsonWriter::ContinueString(uint32_t n)
5789 VMA_ASSERT(m_InsideString);
// Appends a 64-bit number to the currently open string.
5793 void VmaJsonWriter::ContinueString(uint64_t n)
5795 VMA_ASSERT(m_InsideString);
// Appends a pointer value (formatted by the string builder) to the open string.
5799 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5801 VMA_ASSERT(m_InsideString);
5802 m_SB.AddPointer(ptr);
// Optionally appends pStr, then closes the quoted string value.
5805 void VmaJsonWriter::EndString(
const char* pStr)
5807 VMA_ASSERT(m_InsideString);
5808 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5810 ContinueString(pStr);
5813 m_InsideString =
false;
// Writes an unquoted 32-bit numeric value.
5816 void VmaJsonWriter::WriteNumber(uint32_t n)
5818 VMA_ASSERT(!m_InsideString);
// Writes an unquoted 64-bit numeric value.
5823 void VmaJsonWriter::WriteNumber(uint64_t n)
5825 VMA_ASSERT(!m_InsideString);
// Writes a JSON boolean literal ("true"/"false").
5830 void VmaJsonWriter::WriteBool(
bool b)
5832 VMA_ASSERT(!m_InsideString);
5834 m_SB.Add(b ?
"true" :
"false");
// Writes the JSON "null" literal.
5837 void VmaJsonWriter::WriteNull()
5839 VMA_ASSERT(!m_InsideString);
// Bookkeeping before emitting a value: inside an object, even-indexed values
// must be strings (they are the keys — enforced by the assert); separators and
// indentation for subsequent values are handled in the elided branches.
5844 void VmaJsonWriter::BeginValue(
bool isString)
5846 if(!m_Stack.empty())
5848 StackItem& currItem = m_Stack.back();
5849 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5850 currItem.valueCount % 2 == 0)
5852 VMA_ASSERT(isString);
5855 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5856 currItem.valueCount % 2 != 0)
5860 else if(currItem.valueCount > 0)
5869 ++currItem.valueCount;
// Emits a newline plus one indent level per stack entry, unless the innermost
// collection is in single-line mode; oneLess reduces depth for closing brackets.
5873 void VmaJsonWriter::WriteIndent(
bool oneLess)
5875 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
5879 size_t count = m_Stack.size();
5880 if(count > 0 && oneLess)
5884 for(
size_t i = 0; i < count; ++i)
// NOTE(review): mangled extraction of VmaAllocation_T accessors/mutators; the
// switch(m_Type) headers and braces around the case labels were dropped.

// Sets the allocation's user data. When the allocation was created with the
// user-data-is-string flag, the old string is freed and the new one is
// deep-copied via vma_new_array; otherwise the raw pointer is stored as-is.
5891 #endif // #if VMA_STATS_STRING_ENABLED 5895 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
5897 if(IsUserDataString())
5899 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
5901 FreeUserDataString(hAllocator);
5903 if(pUserData != VMA_NULL)
5905 const char*
const newStrSrc = (
char*)pUserData;
5906 const size_t newStrLen = strlen(newStrSrc);
5907 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the terminating NUL along with the characters.
5908 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5909 m_pUserData = newStrDst;
5914 m_pUserData = pUserData;
// Moves a block-type allocation to a different block/offset (used by
// defragmentation). If the allocation is persistently mapped, the mapping is
// migrated: unmap from the old block, map into the new one with the same
// reference count.
5918 void VmaAllocation_T::ChangeBlockAllocation(
5920 VmaDeviceMemoryBlock* block,
5921 VkDeviceSize offset)
5923 VMA_ASSERT(block != VMA_NULL);
5924 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5927 if(block != m_BlockAllocation.m_Block)
5929 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
5930 if(IsPersistentMap())
5932 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5933 block->Map(hAllocator, mapRefCount, VMA_NULL);
5936 m_BlockAllocation.m_Block = block;
5937 m_BlockAllocation.m_Offset = offset;
// Offset within the VkDeviceMemory; dedicated allocations start at 0
// (the ALLOCATION_TYPE_DEDICATED return was elided by the extraction).
5940 VkDeviceSize VmaAllocation_T::GetOffset()
const 5944 case ALLOCATION_TYPE_BLOCK:
5945 return m_BlockAllocation.m_Offset;
5946 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle for either allocation type.
5954 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5958 case ALLOCATION_TYPE_BLOCK:
5959 return m_BlockAllocation.m_Block->GetDeviceMemory();
5960 case ALLOCATION_TYPE_DEDICATED:
5961 return m_DedicatedAllocation.m_hMemory;
5964 return VK_NULL_HANDLE;
// Vulkan memory type index backing this allocation.
5968 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5972 case ALLOCATION_TYPE_BLOCK:
5973 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5974 case ALLOCATION_TYPE_DEDICATED:
5975 return m_DedicatedAllocation.m_MemoryTypeIndex;
// CPU pointer to the mapped data, or presumably null when not mapped (the
// m_MapCount == 0 branch was elided — TODO confirm). For block allocations the
// block's mapping is offset by this allocation's offset.
5982 void* VmaAllocation_T::GetMappedData()
const 5986 case ALLOCATION_TYPE_BLOCK:
5989 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5990 VMA_ASSERT(pBlockData != VMA_NULL);
5991 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5998 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer and map count must agree: both set or both zero.
5999 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6000 return m_DedicatedAllocation.m_pMappedData;
// Whether this allocation may become lost; only block allocations carry the
// flag (dedicated allocations cannot become lost).
6007 bool VmaAllocation_T::CanBecomeLost()
const 6011 case ALLOCATION_TYPE_BLOCK:
6012 return m_BlockAllocation.m_CanBecomeLost;
6013 case ALLOCATION_TYPE_DEDICATED:
// Pool handle; meaningful only for block allocations.
6021 VmaPool VmaAllocation_T::GetPool()
const 6023 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6024 return m_BlockAllocation.m_hPool;
// Attempts to mark the allocation lost via a compare-exchange on the
// last-use frame index (lock-free retry loop; loop body partially elided).
// Fails if already lost or still within the frames-in-use window.
6027 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6029 VMA_ASSERT(CanBecomeLost());
6035 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6038 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6043 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6049 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names for VmaSuballocationType values, indexed by the enum
// (initializer list elided by the extraction). Stats-string support only.
6059 #if VMA_STATS_STRING_ENABLED 6062 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Serializes this allocation's parameters (type, size, user data, frame
// indices, buffer/image usage) as key/value pairs into the JSON writer.
6071 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6073 json.WriteString(
"Type");
6074 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6076 json.WriteString(
"Size");
6077 json.WriteNumber(m_Size);
6079 if(m_pUserData != VMA_NULL)
6081 json.WriteString(
"UserData");
// String user data is emitted as a quoted JSON string; otherwise the raw
// pointer value is emitted (surrounding Begin/EndString calls elided).
6082 if(IsUserDataString())
6084 json.WriteString((
const char*)m_pUserData);
6089 json.ContinueString_Pointer(m_pUserData);
6094 json.WriteString(
"CreationFrameIndex");
6095 json.WriteNumber(m_CreationFrameIndex);
6097 json.WriteString(
"LastUseFrameIndex");
6098 json.WriteNumber(GetLastUseFrameIndex());
// Usage flags are only printed when set (0 means unknown/not recorded).
6100 if(m_BufferImageUsage != 0)
6102 json.WriteString(
"Usage");
6103 json.WriteNumber(m_BufferImageUsage);
// Frees the deep-copied user-data string (allocated in SetUserData) and
// clears the pointer. Only valid when the allocation stores a string.
6109 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6111 VMA_ASSERT(IsUserDataString());
6112 if(m_pUserData != VMA_NULL)
6114 char*
const oldStr = (
char*)m_pUserData;
6115 const size_t oldStrLen = strlen(oldStr);
// Array length includes the terminating NUL, matching the vma_new_array size.
6116 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6117 m_pUserData = VMA_NULL;
// Increments the map reference count for a block allocation. The low 7 bits of
// m_MapCount hold the count (persistent-map flag masked out); 0x7F is the cap.
6121 void VmaAllocation_T::BlockAllocMap()
6123 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6125 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6131 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count for a block allocation; asserts if the
// allocation was not mapped.
6135 void VmaAllocation_T::BlockAllocUnmap()
6137 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6139 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6145 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, just returns the cached
// pointer and bumps the reference count (count-increment line elided);
// otherwise calls vkMapMemory through the allocator's function table and
// caches the result on success.
6149 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6151 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6155 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6157 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6158 *ppData = m_DedicatedAllocation.m_pMappedData;
6164 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6165 return VK_ERROR_MEMORY_MAP_FAILED;
// vkMapMemory call (offset/size/flags arguments elided by the extraction).
6170 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6171 hAllocator->m_hDevice,
6172 m_DedicatedAllocation.m_hMemory,
6177 if(result == VK_SUCCESS)
6179 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation; when the reference count drops to zero the
// cached pointer is cleared and vkUnmapMemory is called.
6186 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6188 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6190 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6195 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6196 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6197 hAllocator->m_hDevice,
6198 m_DedicatedAllocation.m_hMemory);
6203 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo structure as a JSON object: block/allocation/
// unused-range counts, byte totals, and min/avg/max sub-objects for
// allocation and unused-range sizes. The WriteNumber calls that pair with
// each key were elided by the extraction.
6207 #if VMA_STATS_STRING_ENABLED 6209 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6213 json.WriteString(
"Blocks");
6216 json.WriteString(
"Allocations");
6219 json.WriteString(
"UnusedRanges");
6222 json.WriteString(
"UsedBytes");
6225 json.WriteString(
"UnusedBytes");
6230 json.WriteString(
"AllocationSize");
// Min/Avg/Max emitted as a compact single-line object.
6231 json.BeginObject(
true);
6232 json.WriteString(
"Min");
6234 json.WriteString(
"Avg");
6236 json.WriteString(
"Max");
6243 json.WriteString(
"UnusedRangeSize");
6244 json.BeginObject(
true);
6245 json.WriteString(
"Min");
6247 json.WriteString(
"Avg");
6249 json.WriteString(
"Max");
// Comparator used to keep m_FreeSuballocationsBySize sorted by size and to
// binary-search it; compares iterator->size against another iterator or a
// plain VkDeviceSize.
6257 #endif // #if VMA_STATS_STRING_ENABLED 6259 struct VmaSuballocationItemSizeLess
6262 const VmaSuballocationList::iterator lhs,
6263 const VmaSuballocationList::iterator rhs)
const 6265 return lhs->size < rhs->size;
6268 const VmaSuballocationList::iterator lhs,
6269 VkDeviceSize rhsSize)
const 6271 return lhs->size < rhsSize;
// Writes the common header of a detailed block map: totals plus the opening
// of the "Suballocations" array that the derived class fills in.
6279 #if VMA_STATS_STRING_ENABLED 6281 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6282 VkDeviceSize unusedBytes,
6283 size_t allocationCount,
6284 size_t unusedRangeCount)
const 6288 json.WriteString(
"TotalBytes");
6289 json.WriteNumber(GetSize());
6291 json.WriteString(
"UnusedBytes");
6292 json.WriteNumber(unusedBytes);
6294 json.WriteString(
"Allocations");
6295 json.WriteNumber((uint64_t)allocationCount);
6297 json.WriteString(
"UnusedRanges");
6298 json.WriteNumber((uint64_t)unusedRangeCount);
6300 json.WriteString(
"Suballocations");
// Writes one used suballocation entry: its offset plus the allocation's own
// parameters (via PrintParameters), as a single-line JSON object.
6304 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6305 VkDeviceSize offset,
6308 json.BeginObject(
true);
6310 json.WriteString(
"Offset");
6311 json.WriteNumber(offset);
6313 hAllocation->PrintParameters(json);
// Writes one free-range entry: offset, the FREE type name, and size.
6318 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6319 VkDeviceSize offset,
6320 VkDeviceSize size)
const 6322 json.BeginObject(
true);
6324 json.WriteString(
"Offset");
6325 json.WriteNumber(offset);
6327 json.WriteString(
"Type");
6328 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6330 json.WriteString(
"Size");
6331 json.WriteNumber(size);
// Closes the "Suballocations" array and the enclosing object (body elided).
6336 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// Constructor: both containers use the allocator's callbacks so their memory
// is attributed to the same VkAllocationCallbacks as everything else.
const 6342 #endif // #if VMA_STATS_STRING_ENABLED 6347 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6350 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6351 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6355 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes metadata for a block of the given size: the whole block becomes
// a single FREE suballocation, registered in the by-size free list.
6359 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6361 VmaBlockMetadata::Init(size);
6363 m_SumFreeSize = size;
6365 VmaSuballocation suballoc = {};
6366 suballoc.offset = 0;
6367 suballoc.size = size;
6368 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6369 suballoc.hAllocation = VK_NULL_HANDLE;
6371 m_Suballocations.push_back(suballoc);
// NOTE(review): iterator is taken from end() here; presumably decremented to
// the just-pushed element on an elided line — TODO confirm against upstream.
6372 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6374 m_FreeSuballocationsBySize.push_back(suballocItem);
// Consistency check over the whole metadata structure. Walks every
// suballocation verifying offsets are contiguous, free/used state matches the
// hAllocation handle, no two free ranges are adjacent, and recomputed
// free-count/free-size totals match the cached members. Then verifies the
// by-size free list is sorted and contains exactly the registrable free
// ranges. (The early-return-false branches inside each check were elided.)
6377 bool VmaBlockMetadata_Generic::Validate()
const 6379 if(m_Suballocations.empty())
6385 VkDeviceSize calculatedOffset = 0;
6387 uint32_t calculatedFreeCount = 0;
6389 VkDeviceSize calculatedSumFreeSize = 0;
6392 size_t freeSuballocationsToRegister = 0;
6394 bool prevFree =
false;
6396 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6397 suballocItem != m_Suballocations.cend();
6400 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
6403 if(subAlloc.offset != calculatedOffset)
6408 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged.
6410 if(prevFree && currFree)
// Free <=> null allocation handle.
6415 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
6422 calculatedSumFreeSize += subAlloc.size;
6423 ++calculatedFreeCount;
// Only free ranges at least this large are tracked in the by-size list.
6424 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6426 ++freeSuballocationsToRegister;
6430 if(subAlloc.size < VMA_DEBUG_MARGIN)
// Used suballocations must agree with their allocation's recorded offset/size.
6437 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
6441 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
// With a debug margin enabled, every used range must be preceded by free space.
6447 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
6453 calculatedOffset += subAlloc.size;
6454 prevFree = currFree;
6459 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
6464 VkDeviceSize lastSize = 0;
6465 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6467 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6470 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
// The list must be sorted ascending by size.
6475 if(suballocItem->size < lastSize)
6480 lastSize = suballocItem->size;
6484 if(!ValidateFreeSuballocationList() ||
6485 (calculatedOffset != GetSize()) ||
6486 (calculatedSumFreeSize != m_SumFreeSize) ||
6487 (calculatedFreeCount != m_FreeCount))
// Largest free range: the by-size list is sorted ascending, so its last entry
// is the maximum (returns 0 when the list is empty — elided branch).
6495 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6497 if(!m_FreeSuballocationsBySize.empty())
6499 return m_FreeSuballocationsBySize.back()->size;
// Block is empty iff it contains exactly one suballocation and it is free.
6507 bool VmaBlockMetadata_Generic::IsEmpty()
const 6509 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo from this block: counts come from the cached members,
// per-range min/max accumulation happens in the elided loop bodies.
6512 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6516 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6528 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6529 suballocItem != m_Suballocations.cend();
6532 const VmaSuballocation& suballoc = *suballocItem;
6533 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into pool-wide statistics.
6546 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6548 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6550 inoutStats.
size += GetSize();
// Emits the full detailed map of this block: header, then one JSON entry per
// suballocation (free range or allocation), then the footer.
6557 #if VMA_STATS_STRING_ENABLED 6559 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6561 PrintDetailedMap_Begin(json,
6563 m_Suballocations.size() - (size_t)m_FreeCount,
6567 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6568 suballocItem != m_Suballocations.cend();
6569 ++suballocItem, ++i)
6571 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6573 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6577 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6581 PrintDetailedMap_End(json);
// Tries to find space for a new allocation inside this block.
// Strategy: (1) quick reject if total free space is insufficient; (2) binary-
// search the sorted free list for the first range large enough (best-fit), or
// iterate it backwards for the worst-fit path (the strategy selector between
// the two loops was elided); (3) if canMakeOtherLost, additionally scan every
// suballocation considering evicting lost-able allocations, keeping the
// cheapest candidate by CalcCost(). Returns whether a request was produced.
6584 #endif // #if VMA_STATS_STRING_ENABLED 6596 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6597 uint32_t currentFrameIndex,
6598 uint32_t frameInUseCount,
6599 VkDeviceSize bufferImageGranularity,
6600 VkDeviceSize allocSize,
6601 VkDeviceSize allocAlignment,
6603 VmaSuballocationType allocType,
6604 bool canMakeOtherLost,
6605 VmaAllocationRequest* pAllocationRequest)
6607 VMA_ASSERT(allocSize > 0);
6608 VMA_ASSERT(!upperAddress);
6609 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6610 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6611 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, free space must cover size plus both margins.
6614 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6620 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6621 if(freeSuballocCount > 0)
// Best-fit: binary search for the smallest free range that can hold
// allocSize plus debug margins, then scan forward until one truly fits
// (alignment/granularity can disqualify candidates).
6626 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6627 m_FreeSuballocationsBySize.data(),
6628 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6629 allocSize + 2 * VMA_DEBUG_MARGIN,
6630 VmaSuballocationItemSizeLess());
6631 size_t index = it - m_FreeSuballocationsBySize.data();
6632 for(; index < freeSuballocCount; ++index)
6637 bufferImageGranularity,
6641 m_FreeSuballocationsBySize[index],
6643 &pAllocationRequest->offset,
6644 &pAllocationRequest->itemsToMakeLostCount,
6645 &pAllocationRequest->sumFreeSize,
6646 &pAllocationRequest->sumItemSize))
6648 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative path: walk the free list from largest to smallest.
6656 for(
size_t index = freeSuballocCount; index--; )
6661 bufferImageGranularity,
6665 m_FreeSuballocationsBySize[index],
6667 &pAllocationRequest->offset,
6668 &pAllocationRequest->itemsToMakeLostCount,
6669 &pAllocationRequest->sumFreeSize,
6670 &pAllocationRequest->sumItemSize))
6672 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
6679 if(canMakeOtherLost)
// Sentinel "worst possible" cost so any real candidate wins the comparison.
6683 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6684 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6686 VmaAllocationRequest tmpAllocRequest = {};
6687 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6688 suballocIt != m_Suballocations.end();
// Candidates: free ranges, or allocations that can be made lost.
6691 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6692 suballocIt->hAllocation->CanBecomeLost())
6697 bufferImageGranularity,
6703 &tmpAllocRequest.offset,
6704 &tmpAllocRequest.itemsToMakeLostCount,
6705 &tmpAllocRequest.sumFreeSize,
6706 &tmpAllocRequest.sumItemSize))
6708 tmpAllocRequest.item = suballocIt;
// Keep the candidate with the lowest eviction cost.
6710 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
6712 *pAllocationRequest = tmpAllocRequest;
// A real candidate was found iff the sentinel was overwritten.
6718 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the eviction plan attached to an allocation request: walks forward
// from the request's item, skipping free ranges, and marks the required
// number of lost-able allocations as lost (freeing their ranges). Leaves the
// request pointing at a free suballocation.
6727 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
6728 uint32_t currentFrameIndex,
6729 uint32_t frameInUseCount,
6730 VmaAllocationRequest* pAllocationRequest)
6732 while(pAllocationRequest->itemsToMakeLostCount > 0)
6734 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6736 ++pAllocationRequest->item;
6738 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6739 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6740 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6741 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; use the iterator it returns.
6743 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6744 --pAllocationRequest->itemsToMakeLostCount;
6752 VMA_HEAVY_ASSERT(Validate());
6753 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6754 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks every lost-able allocation in this block as lost; returns how many
// were actually transitioned.
6759 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6761 uint32_t lostAllocationCount = 0;
6762 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6763 it != m_Suballocations.end();
6766 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6767 it->hAllocation->CanBecomeLost() &&
6768 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6770 it = FreeSuballocation(it);
6771 ++lostAllocationCount;
6774 return lostAllocationCount;
// Validates the magic-value guard bytes written in the debug margins before
// and after every used suballocation; returns a validation error on mismatch.
6777 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
6779 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6780 it != m_Suballocations.end();
6783 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6785 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6787 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6788 return VK_ERROR_VALIDATION_FAILED_EXT;
6790 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6792 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6793 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the chosen free
// suballocation into a used one and, when the request leaves padding before
// or after the allocation, splits that padding off into new FREE
// suballocations that are re-registered in the by-size list. Finally updates
// the cached free count and free-size totals.
6801 void VmaBlockMetadata_Generic::Alloc(
6802 const VmaAllocationRequest& request,
6803 VmaSuballocationType type,
6804 VkDeviceSize allocSize,
6808 VMA_ASSERT(!upperAddress);
6809 VMA_ASSERT(request.item != m_Suballocations.end());
6810 VmaSuballocation& suballoc = *request.item;
6812 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6814 VMA_ASSERT(request.offset >= suballoc.offset);
// Space between the free range's start and the aligned allocation offset.
6815 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
6816 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// Space left over after the allocation within the free range.
6817 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range is no longer free at its registered size.
6821 UnregisterFreeSuballocation(request.item);
6823 suballoc.offset = request.offset;
6824 suballoc.size = allocSize;
6825 suballoc.type = type;
6826 suballoc.hAllocation = hAllocation;
// Trailing padding becomes its own free suballocation, inserted after.
6831 VmaSuballocation paddingSuballoc = {};
6832 paddingSuballoc.offset = request.offset + allocSize;
6833 paddingSuballoc.size = paddingEnd;
6834 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6835 VmaSuballocationList::iterator next = request.item;
6837 const VmaSuballocationList::iterator paddingEndItem =
6838 m_Suballocations.insert(next, paddingSuballoc);
6839 RegisterFreeSuballocation(paddingEndItem);
// Leading padding likewise becomes a free suballocation, inserted before.
6845 VmaSuballocation paddingSuballoc = {};
6846 paddingSuballoc.offset = request.offset - paddingBegin;
6847 paddingSuballoc.size = paddingBegin;
6848 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6849 const VmaSuballocationList::iterator paddingBeginItem =
6850 m_Suballocations.insert(request.item, paddingSuballoc);
6851 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each padding piece adds one back (elided ++).
6855 m_FreeCount = m_FreeCount - 1;
6856 if(paddingBegin > 0)
6864 m_SumFreeSize -= allocSize;
// Frees an allocation by linear search over the suballocation list — the
// allocation handle identifies the range.
6867 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
6869 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6870 suballocItem != m_Suballocations.end();
6873 VmaSuballocation& suballoc = *suballocItem;
6874 if(suballoc.hAllocation == allocation)
6876 FreeSuballocation(suballocItem);
6877 VMA_HEAVY_ASSERT(Validate());
6881 VMA_ASSERT(0 &&
"Not found!");
// Same as Free(), but identifies the range by its offset instead of handle.
6884 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
6886 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6887 suballocItem != m_Suballocations.end();
6890 VmaSuballocation& suballoc = *suballocItem;
6891 if(suballoc.offset == offset)
6893 FreeSuballocation(suballocItem);
6897 VMA_ASSERT(0 &&
"Not found!");
// Sanity-checks m_FreeSuballocationsBySize: every entry must be a FREE range,
// at least the registration threshold in size, and the list must be sorted
// ascending by size. (Early-return-false bodies elided by the extraction.)
6900 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 6902 VkDeviceSize lastSize = 0;
6903 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
6905 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
6907 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6912 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6917 if(it->size < lastSize)
6923 lastSize = it->size;
// Core placement test: determines whether an allocation of allocSize/
// allocAlignment/allocType can be placed starting at suballocItem, honoring
// VMA_DEBUG_MARGIN and bufferImageGranularity. Two major paths:
//   - canMakeOtherLost: the candidate may span multiple successive
//     suballocations, counting lost-able allocations that would need eviction
//     (outputs itemsToMakeLostCount / sumItemSize);
//   - otherwise: the candidate must fit entirely inside one FREE range.
// Outputs the final aligned *pOffset on success. Most early-return-false
// bodies were elided by the extraction.
6928 bool VmaBlockMetadata_Generic::CheckAllocation(
6929 uint32_t currentFrameIndex,
6930 uint32_t frameInUseCount,
6931 VkDeviceSize bufferImageGranularity,
6932 VkDeviceSize allocSize,
6933 VkDeviceSize allocAlignment,
6934 VmaSuballocationType allocType,
6935 VmaSuballocationList::const_iterator suballocItem,
6936 bool canMakeOtherLost,
6937 VkDeviceSize* pOffset,
6938 size_t* itemsToMakeLostCount,
6939 VkDeviceSize* pSumFreeSize,
6940 VkDeviceSize* pSumItemSize)
const 6942 VMA_ASSERT(allocSize > 0);
6943 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6944 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6945 VMA_ASSERT(pOffset != VMA_NULL);
6947 *itemsToMakeLostCount = 0;
6951 if(canMakeOtherLost)
6953 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6955 *pSumFreeSize = suballocItem->size;
// A used range only qualifies if its allocation is lost-able and already
// outside the frames-in-use window.
6959 if(suballocItem->hAllocation->CanBecomeLost() &&
6960 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6962 ++*itemsToMakeLostCount;
6963 *pSumItemSize = suballocItem->size;
// Remaining space in the block from this range's start must cover allocSize.
6972 if(GetSize() - suballocItem->offset < allocSize)
6978 *pOffset = suballocItem->offset;
// Leave the debug margin before the allocation.
6981 if(VMA_DEBUG_MARGIN > 0)
6983 *pOffset += VMA_DEBUG_MARGIN;
6987 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check previous suballocations on the same granularity page for a
// buffer-vs-image conflict; if found, bump alignment up to the granularity.
6991 if(bufferImageGranularity > 1)
6993 bool bufferImageGranularityConflict =
false;
6994 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6995 while(prevSuballocItem != m_Suballocations.cbegin())
6998 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6999 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7001 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7003 bufferImageGranularityConflict =
true;
7011 if(bufferImageGranularityConflict)
7013 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
7019 if(*pOffset >= suballocItem->offset + suballocItem->size)
7025 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7028 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7030 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7032 if(suballocItem->offset + totalSize > GetSize())
// Walk forward across as many suballocations as needed to cover totalSize,
// accumulating evictable item sizes and free sizes along the way.
7039 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7040 if(totalSize > suballocItem->size)
7042 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7043 while(remainingSize > 0)
7046 if(lastSuballocItem == m_Suballocations.cend())
7050 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7052 *pSumFreeSize += lastSuballocItem->size;
7056 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7057 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7058 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7060 ++*itemsToMakeLostCount;
7061 *pSumItemSize += lastSuballocItem->size;
7068 remainingSize = (lastSuballocItem->size < remainingSize) ?
7069 remainingSize - lastSuballocItem->size : 0;
// Allocations *after* the end of the candidate that share a granularity page
// would also have to be made lost on conflict.
7075 if(bufferImageGranularity > 1)
7077 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7079 while(nextSuballocItem != m_Suballocations.cend())
7081 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7082 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7084 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7086 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7087 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7088 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7090 ++*itemsToMakeLostCount;
// --- non-eviction path: candidate must be one FREE range ---
7109 const VmaSuballocation& suballoc = *suballocItem;
7110 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7112 *pSumFreeSize = suballoc.size;
7115 if(suballoc.size < allocSize)
7121 *pOffset = suballoc.offset;
7124 if(VMA_DEBUG_MARGIN > 0)
7126 *pOffset += VMA_DEBUG_MARGIN;
7130 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict scan as in the eviction path.
7134 if(bufferImageGranularity > 1)
7136 bool bufferImageGranularityConflict =
false;
7137 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7138 while(prevSuballocItem != m_Suballocations.cbegin())
7141 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7142 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7144 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7146 bufferImageGranularityConflict =
true;
7154 if(bufferImageGranularityConflict)
7156 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7161 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7164 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if padding + allocation + end margin exceed this free range.
7167 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: a conflicting later allocation on the same page
// makes this placement unusable in the non-eviction path.
7174 if(bufferImageGranularity > 1)
7176 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7178 while(nextSuballocItem != m_Suballocations.cend())
7180 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7181 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7183 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a free suballocation with the free suballocation immediately after
// it: sizes are combined into 'item' and the successor is erased. (The ++ on
// nextItem and the free-count decrement were elided by the extraction.)
7202 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7204 VMA_ASSERT(item != m_Suballocations.end());
7205 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7207 VmaSuballocationList::iterator nextItem = item;
7209 VMA_ASSERT(nextItem != m_Suballocations.end());
7210 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7212 item->size += nextItem->size;
7214 m_Suballocations.erase(nextItem);
// Converts a used suballocation back to FREE, coalesces with free neighbors
// on either side, and (re)registers the resulting range in the by-size list.
// Returns an iterator to the surviving merged free suballocation.
7217 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7220 VmaSuballocation& suballoc = *suballocItem;
7221 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7222 suballoc.hAllocation = VK_NULL_HANDLE;
7226 m_SumFreeSize += suballoc.size;
7229 bool mergeWithNext =
false;
7230 bool mergeWithPrev =
false;
7232 VmaSuballocationList::iterator nextItem = suballocItem;
7234 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7236 mergeWithNext =
true;
7239 VmaSuballocationList::iterator prevItem = suballocItem;
7240 if(suballocItem != m_Suballocations.begin())
7243 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7245 mergeWithPrev =
true;
// Neighbors must be unregistered before merging; their registered sizes
// become stale once ranges are combined.
7251 UnregisterFreeSuballocation(nextItem);
7252 MergeFreeWithNext(suballocItem);
7257 UnregisterFreeSuballocation(prevItem);
7258 MergeFreeWithNext(prevItem);
7259 RegisterFreeSuballocation(prevItem);
7264 RegisterFreeSuballocation(suballocItem);
7265 return suballocItem;
// Inserts a free suballocation into the sorted-by-size registry, but only if
// it meets the minimum registrable size. Small free ranges are tracked solely
// in the main suballocation list.
7269 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7271 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7272 VMA_ASSERT(item->size > 0);
7276 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7278 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7280 if(m_FreeSuballocationsBySize.empty())
7282 m_FreeSuballocationsBySize.push_back(item);
7286 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the registry. Binary-searches to the
// first entry of equal size, then scans forward through the run of
// equal-sized entries to find the exact iterator; asserts if absent.
7294 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7296 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7297 VMA_ASSERT(item->size > 0);
7301 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7303 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7305 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7306 m_FreeSuballocationsBySize.data(),
7307 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7309 VmaSuballocationItemSizeLess());
7310 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7311 index < m_FreeSuballocationsBySize.size();
7314 if(m_FreeSuballocationsBySize[index] == item)
7316 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// While scanning, every visited entry must still have the same size;
// otherwise the item was not in the list at all.
7319 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7321 VMA_ASSERT(0 &&
"Not found.");
// Linear (ring-buffer / stack / double-stack) metadata. Two suballocation
// vectors are used alternately; m_1stVectorIndex selects which is "1st".
// Null-item counters track holes left by freed allocations pending cleanup.
7330 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7332 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7333 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7334 m_1stVectorIndex(0),
7335 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7336 m_1stNullItemsBeginCount(0),
7337 m_1stNullItemsMiddleCount(0),
7338 m_2ndNullItemsCount(0)
7342 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// The whole block starts free; no initial suballocation entry is created
// (unlike the Generic metadata).
7346 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7348 VmaBlockMetadata::Init(size);
7349 m_SumFreeSize = size;
// Consistency check for the linear metadata: validates vector-mode invariants,
// null-item counters, per-suballocation offset ordering within both vectors
// (handling ring-buffer and double-stack modes separately), and that the
// recomputed used-size total matches m_SumFreeSize. Early-return-false bodies
// inside the individual checks were elided by the extraction.
7352 bool VmaBlockMetadata_Linear::Validate()
const 7354 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7355 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a second-vector mode is active.
7357 if(suballocations2nd.empty() != (m_2ndVectorMode == SECOND_VECTOR_EMPTY))
7361 if(suballocations1st.empty() && !suballocations2nd.empty() &&
7362 m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7366 if(!suballocations1st.empty())
// First non-null and last items of the 1st vector must be real allocations.
7369 if(suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
7374 if(suballocations1st.back().hAllocation == VK_NULL_HANDLE)
7379 if(!suballocations2nd.empty())
7382 if(suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
7388 if(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount > suballocations1st.size())
7392 if(m_2ndNullItemsCount > suballocations2nd.size())
7397 VkDeviceSize sumUsedSize = 0;
7398 const size_t suballoc1stCount = suballocations1st.size();
7399 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector precedes the 1st in address order.
7401 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7403 const size_t suballoc2ndCount = suballocations2nd.size();
7404 size_t nullItem2ndCount = 0;
7405 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7407 const VmaSuballocation& suballoc = suballocations2nd[i];
7408 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7410 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7414 if(suballoc.offset < offset)
7421 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7425 if(suballoc.hAllocation->GetSize() != suballoc.size)
7429 sumUsedSize += suballoc.size;
7436 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7439 if(nullItem2ndCount != m_2ndNullItemsCount)
// Leading null items of the 1st vector must all be genuinely empty.
7445 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7447 const VmaSuballocation& suballoc = suballocations1st[i];
7448 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE ||
7449 suballoc.hAllocation != VK_NULL_HANDLE)
7455 size_t nullItem1stCount = m_1stNullItemsBeginCount;
7457 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7459 const VmaSuballocation& suballoc = suballocations1st[i];
7460 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7462 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7466 if(suballoc.offset < offset)
7470 if(i < m_1stNullItemsBeginCount && !currFree)
7477 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7481 if(suballoc.hAllocation->GetSize() != suballoc.size)
7485 sumUsedSize += suballoc.size;
7492 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7494 if(nullItem1stCount != m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount)
// Double-stack mode: 2nd vector grows downward from the top, so iterate it
// in reverse to keep offsets monotonically increasing.
7499 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7501 const size_t suballoc2ndCount = suballocations2nd.size();
7502 size_t nullItem2ndCount = 0;
7503 for(
size_t i = suballoc2ndCount; i--; )
7505 const VmaSuballocation& suballoc = suballocations2nd[i];
7506 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7508 if(currFree != (suballoc.hAllocation == VK_NULL_HANDLE))
7512 if(suballoc.offset < offset)
7519 if(suballoc.hAllocation->GetOffset() != suballoc.offset)
7523 if(suballoc.hAllocation->GetSize() != suballoc.size)
7527 sumUsedSize += suballoc.size;
7534 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7537 if(nullItem2ndCount != m_2ndNullItemsCount)
7543 if(offset > GetSize())
7547 if(m_SumFreeSize != GetSize() - sumUsedSize)
7555 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7557 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7558 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range available for a new
// allocation, which depends on the mode of the 2nd suballocation vector.
// NOTE(review): braces, the wrapping call around the two EMPTY-case operands
// (presumably a max), and the switch default are not visible in this chunk.
7561 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7563 const VkDeviceSize size = GetSize();
7575 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7577 switch(m_2ndVectorMode)
// No 2nd vector: free space exists before the first used suballocation
// (would turn the block into a ring buffer) and after the last one.
7579 case SECOND_VECTOR_EMPTY:
7585 const size_t suballocations1stCount = suballocations1st.size();
7586 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7587 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7588 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Two candidate ranges: [0, firstSuballoc.offset) and [end of last, size).
7590 firstSuballoc.offset,
7591 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the usable gap lies between the end of the 2nd vector
// (which wraps around from offset 0) and the start of the 1st vector.
7595 case SECOND_VECTOR_RING_BUFFER:
7600 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7601 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7602 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7603 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: 1st grows up from 0, 2nd grows down from the block end;
// free space is the hole between the two stack tops.
7607 case SECOND_VECTOR_DOUBLE_STACK:
7612 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7613 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7614 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7615 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with per-block statistics (allocation count, unused ranges,
// size extremes) by walking all three possible regions in address order:
// ring-buffer part of the 2nd vector, then the 1st vector, then the
// double-stack part of the 2nd vector. NOTE(review): the statements that
// accumulate into outInfo are elided in this chunk.
7625 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7627 const VkDeviceSize size = GetSize();
7628 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7629 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7630 const size_t suballoc1stCount = suballocations1st.size();
7631 const size_t suballoc2ndCount = suballocations2nd.size();
// Running cursor: end offset of the last region processed so far.
7642 VkDeviceSize lastOffset = 0;
// Pass 1: 2nd vector used as ring buffer occupies [0, start of 1st vector).
7644 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7646 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7647 size_t nextAlloc2ndIndex = 0;
7648 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items to find the next live allocation.
7651 while(nextAlloc2ndIndex < suballoc2ndCount &&
7652 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7654 ++nextAlloc2ndIndex;
7658 if(nextAlloc2ndIndex < suballoc2ndCount)
7660 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Account for the gap before this allocation, then the allocation itself.
7663 if(lastOffset < suballoc.offset)
7666 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7680 lastOffset = suballoc.offset + suballoc.size;
7681 ++nextAlloc2ndIndex;
// No more live items: account for trailing free space up to the 1st vector.
7687 if(lastOffset < freeSpace2ndTo1stEnd)
7689 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7697 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to the bottom of the upper stack (double-stack
// mode) or the end of the block.
7702 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7703 const VkDeviceSize freeSpace1stTo2ndEnd =
7704 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7705 while(lastOffset < freeSpace1stTo2ndEnd)
7708 while(nextAlloc1stIndex < suballoc1stCount &&
7709 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7711 ++nextAlloc1stIndex;
7715 if(nextAlloc1stIndex < suballoc1stCount)
7717 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7720 if(lastOffset < suballoc.offset)
7723 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7737 lastOffset = suballoc.offset + suballoc.size;
7738 ++nextAlloc1stIndex;
7744 if(lastOffset < freeSpace1stTo2ndEnd)
7746 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7754 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: 2nd vector used as upper stack; iterate backwards because entries
// are stored top-of-block first (descending offsets).
7758 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7760 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7761 while(lastOffset < size)
// SIZE_MAX marks the index having wrapped below 0.
7764 while(nextAlloc2ndIndex != SIZE_MAX &&
7765 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7767 --nextAlloc2ndIndex;
7771 if(nextAlloc2ndIndex != SIZE_MAX)
7773 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7776 if(lastOffset < suballoc.offset)
7779 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7793 lastOffset = suballoc.offset + suballoc.size;
7794 --nextAlloc2ndIndex;
// Final free range up to the end of the block.
7800 if(lastOffset < size)
7802 const VkDeviceSize unusedRangeSize = size - lastOffset;
7818 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 7820 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7821 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7822 const VkDeviceSize size = GetSize();
7823 const size_t suballoc1stCount = suballocations1st.size();
7824 const size_t suballoc2ndCount = suballocations2nd.size();
7826 inoutStats.
size += size;
7828 VkDeviceSize lastOffset = 0;
7830 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7832 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7833 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
7834 while(lastOffset < freeSpace2ndTo1stEnd)
7837 while(nextAlloc2ndIndex < suballoc2ndCount &&
7838 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7840 ++nextAlloc2ndIndex;
7844 if(nextAlloc2ndIndex < suballoc2ndCount)
7846 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7849 if(lastOffset < suballoc.offset)
7852 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7863 lastOffset = suballoc.offset + suballoc.size;
7864 ++nextAlloc2ndIndex;
7869 if(lastOffset < freeSpace2ndTo1stEnd)
7872 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7879 lastOffset = freeSpace2ndTo1stEnd;
7884 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7885 const VkDeviceSize freeSpace1stTo2ndEnd =
7886 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7887 while(lastOffset < freeSpace1stTo2ndEnd)
7890 while(nextAlloc1stIndex < suballoc1stCount &&
7891 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7893 ++nextAlloc1stIndex;
7897 if(nextAlloc1stIndex < suballoc1stCount)
7899 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7902 if(lastOffset < suballoc.offset)
7905 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7916 lastOffset = suballoc.offset + suballoc.size;
7917 ++nextAlloc1stIndex;
7922 if(lastOffset < freeSpace1stTo2ndEnd)
7925 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7932 lastOffset = freeSpace1stTo2ndEnd;
7936 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7938 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7939 while(lastOffset < size)
7942 while(nextAlloc2ndIndex != SIZE_MAX &&
7943 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7945 --nextAlloc2ndIndex;
7949 if(nextAlloc2ndIndex != SIZE_MAX)
7951 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7954 if(lastOffset < suballoc.offset)
7957 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7968 lastOffset = suballoc.offset + suballoc.size;
7969 --nextAlloc2ndIndex;
7974 if(lastOffset < size)
7977 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this block. Two passes over the same
// three regions (ring-buffer 2nd vector, 1st vector, double-stack 2nd
// vector): the first pass only counts allocations/unused ranges and sums
// used bytes (needed by PrintDetailedMap_Begin), the second pass emits the
// actual entries. NOTE(review): braces and some counter increments are
// elided in this chunk.
7990 #if VMA_STATS_STRING_ENABLED 7991 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 7993 const VkDeviceSize size = GetSize();
7994 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7995 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7996 const size_t suballoc1stCount = suballocations1st.size();
7997 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count only.
8001 size_t unusedRangeCount = 0;
8002 VkDeviceSize usedBytes = 0;
8004 VkDeviceSize lastOffset = 0;
8006 size_t alloc2ndCount = 0;
8007 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8009 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8010 size_t nextAlloc2ndIndex = 0;
8011 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
8014 while(nextAlloc2ndIndex < suballoc2ndCount &&
8015 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8017 ++nextAlloc2ndIndex;
8021 if(nextAlloc2ndIndex < suballoc2ndCount)
8023 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8026 if(lastOffset < suballoc.offset)
8035 usedBytes += suballoc.size;
8038 lastOffset = suballoc.offset + suballoc.size;
8039 ++nextAlloc2ndIndex;
8044 if(lastOffset < freeSpace2ndTo1stEnd)
8051 lastOffset = freeSpace2ndTo1stEnd;
8056 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8057 size_t alloc1stCount = 0;
8058 const VkDeviceSize freeSpace1stTo2ndEnd =
8059 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8060 while(lastOffset < freeSpace1stTo2ndEnd)
8063 while(nextAlloc1stIndex < suballoc1stCount &&
8064 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8066 ++nextAlloc1stIndex;
8070 if(nextAlloc1stIndex < suballoc1stCount)
8072 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8075 if(lastOffset < suballoc.offset)
8084 usedBytes += suballoc.size;
8087 lastOffset = suballoc.offset + suballoc.size;
8088 ++nextAlloc1stIndex;
8093 if(lastOffset < size)
8100 lastOffset = freeSpace1stTo2ndEnd;
// Upper stack scanned backwards; SIZE_MAX = wrapped below index 0.
8104 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8106 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8107 while(lastOffset < size)
8110 while(nextAlloc2ndIndex != SIZE_MAX &&
8111 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8113 --nextAlloc2ndIndex;
8117 if(nextAlloc2ndIndex != SIZE_MAX)
8119 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8122 if(lastOffset < suballoc.offset)
8131 usedBytes += suballoc.size;
8134 lastOffset = suballoc.offset + suballoc.size;
8135 --nextAlloc2ndIndex;
8140 if(lastOffset < size)
// Emit the JSON header using the totals gathered in the first pass.
8152 const VkDeviceSize unusedBytes = size - usedBytes;
8153 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: same traversal, this time emitting entries.
8158 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8160 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8161 size_t nextAlloc2ndIndex = 0;
8162 while(lastOffset < freeSpace2ndTo1stEnd)
8165 while(nextAlloc2ndIndex < suballoc2ndCount &&
8166 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8168 ++nextAlloc2ndIndex;
8172 if(nextAlloc2ndIndex < suballoc2ndCount)
8174 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8177 if(lastOffset < suballoc.offset)
8180 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8181 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8186 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8189 lastOffset = suballoc.offset + suballoc.size;
8190 ++nextAlloc2ndIndex;
8195 if(lastOffset < freeSpace2ndTo1stEnd)
8198 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8199 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8203 lastOffset = freeSpace2ndTo1stEnd;
8208 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8209 while(lastOffset < freeSpace1stTo2ndEnd)
8212 while(nextAlloc1stIndex < suballoc1stCount &&
8213 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8215 ++nextAlloc1stIndex;
8219 if(nextAlloc1stIndex < suballoc1stCount)
8221 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8224 if(lastOffset < suballoc.offset)
8227 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8228 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8233 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8236 lastOffset = suballoc.offset + suballoc.size;
8237 ++nextAlloc1stIndex;
8242 if(lastOffset < freeSpace1stTo2ndEnd)
8245 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8246 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8250 lastOffset = freeSpace1stTo2ndEnd;
8254 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8256 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8257 while(lastOffset < size)
8260 while(nextAlloc2ndIndex != SIZE_MAX &&
8261 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8263 --nextAlloc2ndIndex;
8267 if(nextAlloc2ndIndex != SIZE_MAX)
8269 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8272 if(lastOffset < suballoc.offset)
8275 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8276 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8281 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8284 lastOffset = suballoc.offset + suballoc.size;
8285 --nextAlloc2ndIndex;
8290 if(lastOffset < size)
8293 const VkDeviceSize unusedRangeSize = size - lastOffset;
8294 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8303 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment.
// Three strategies depending on the current mode:
//   (a) upper side of the double stack (allocating from the block end down),
//   (b) end of the 1st vector (growing up),
//   (c) wrap-around into a ring buffer before the 1st vector, optionally
//       making existing lost-capable allocations lost (canMakeOtherLost).
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): braces, "return true/false" statements, and the
// upper-side/double-stack selection flag are elided in this chunk.
8305 #endif // #if VMA_STATS_STRING_ENABLED 8307 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8308 uint32_t currentFrameIndex,
8309 uint32_t frameInUseCount,
8310 VkDeviceSize bufferImageGranularity,
8311 VkDeviceSize allocSize,
8312 VkDeviceSize allocAlignment,
8314 VmaSuballocationType allocType,
8315 bool canMakeOtherLost,
8316 VmaAllocationRequest* pAllocationRequest)
8318 VMA_ASSERT(allocSize > 0);
8319 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8320 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8321 VMA_HEAVY_ASSERT(Validate());
8323 const VkDeviceSize size = GetSize();
8324 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8325 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy (a): allocate on the upper side (2nd vector as stack growing
// down from the block end). Incompatible with ring-buffer mode.
8329 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8331 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8336 if(allocSize > size)
// Candidate offset: just below the current top of the upper stack.
8340 VkDeviceSize resultBaseOffset = size - allocSize;
8341 if(!suballocations2nd.empty())
8343 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8344 resultBaseOffset = lastSuballoc.offset - allocSize;
8345 if(allocSize > lastSuballoc.offset)
8352 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin, then align DOWN (stack grows downward).
8355 if(VMA_DEBUG_MARGIN > 0)
8357 if(resultOffset < VMA_DEBUG_MARGIN)
8361 resultOffset -= VMA_DEBUG_MARGIN;
8365 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity vs. neighbors above (in the 2nd vector).
8369 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8371 bool bufferImageGranularityConflict =
false;
8372 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8374 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8375 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8377 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8379 bufferImageGranularityConflict =
true;
8387 if(bufferImageGranularityConflict)
8389 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Fits if there is still room above the end of the 1st vector.
8394 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8395 suballocations1st.back().offset + suballocations1st.back().size :
8397 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against the 1st vector below.
8401 if(bufferImageGranularity > 1)
8403 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8405 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8406 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8408 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: no other allocations need to be lost.
8422 pAllocationRequest->offset = resultOffset;
8423 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8424 pAllocationRequest->sumItemSize = 0;
8426 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy (b): allocate at the end of the 1st vector (growing upward).
8432 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8436 VkDeviceSize resultBaseOffset = 0;
8437 if(!suballocations1st.empty())
8439 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8440 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8444 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin, then align UP.
8447 if(VMA_DEBUG_MARGIN > 0)
8449 resultOffset += VMA_DEBUG_MARGIN;
8453 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8457 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8459 bool bufferImageGranularityConflict =
false;
8460 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8462 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8463 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8465 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8467 bufferImageGranularityConflict =
true;
8475 if(bufferImageGranularityConflict)
8477 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack, or the block end.
8481 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8482 suballocations2nd.back().offset : size;
8485 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts against the upper stack above.
8489 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8491 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8493 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8494 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8496 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8510 pAllocationRequest->offset = resultOffset;
8511 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8512 pAllocationRequest->sumItemSize = 0;
8514 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy (c): wrap around before the 1st vector (ring-buffer mode),
// possibly making lost-capable allocations lost to create room.
8521 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8523 VMA_ASSERT(!suballocations1st.empty());
8525 VkDeviceSize resultBaseOffset = 0;
8526 if(!suballocations2nd.empty())
8528 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8529 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8533 VkDeviceSize resultOffset = resultBaseOffset;
8536 if(VMA_DEBUG_MARGIN > 0)
8538 resultOffset += VMA_DEBUG_MARGIN;
8542 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8546 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8548 bool bufferImageGranularityConflict =
false;
8549 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8551 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8552 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8554 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8556 bufferImageGranularityConflict =
true;
8564 if(bufferImageGranularityConflict)
8566 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8570 pAllocationRequest->itemsToMakeLostCount = 0;
8571 pAllocationRequest->sumItemSize = 0;
8572 size_t index1st = m_1stNullItemsBeginCount;
// Count how many 1st-vector allocations overlapping the candidate range
// could be made lost, accumulating their sizes.
8574 if(canMakeOtherLost)
8576 while(index1st < suballocations1st.size() &&
8577 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8580 const VmaSuballocation& suballoc = suballocations1st[index1st];
8581 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8587 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8588 if(suballoc.hAllocation->CanBecomeLost() &&
8589 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8591 ++pAllocationRequest->itemsToMakeLostCount;
8592 pAllocationRequest->sumItemSize += suballoc.size;
// Granularity: subsequent allocations on the same page may also need
// to become lost.
8604 if(bufferImageGranularity > 1)
8606 while(index1st < suballocations1st.size())
8608 const VmaSuballocation& suballoc = suballocations1st[index1st];
8609 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8611 if(suballoc.hAllocation != VK_NULL_HANDLE)
8614 if(suballoc.hAllocation->CanBecomeLost() &&
8615 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8617 ++pAllocationRequest->itemsToMakeLostCount;
8618 pAllocationRequest->sumItemSize += suballoc.size;
// The request fits if it ends before the block end (no survivor ahead) or
// before the next surviving 1st-vector allocation.
8637 if(index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size ||
8638 index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset)
8642 if(bufferImageGranularity > 1)
8644 for(
size_t nextSuballocIndex = index1st;
8645 nextSuballocIndex < suballocations1st.size();
8646 nextSuballocIndex++)
8648 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8649 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8651 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8665 pAllocationRequest->offset = resultOffset;
// Free size available = gap up to the next survivor (or block end),
// minus what is occupied by the items to be made lost.
8666 pAllocationRequest->sumFreeSize =
8667 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8669 - pAllocationRequest->sumItemSize;
// Makes lost the allocations counted by CreateAllocationRequest in
// pAllocationRequest->itemsToMakeLostCount, scanning the 1st vector from the
// first live item. Returns whether all requested items could be made lost.
// NOTE(review): loop increments, the CleanupAfterFree call, and the return
// statements are elided in this chunk.
8679 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8680 uint32_t currentFrameIndex,
8681 uint32_t frameInUseCount,
8682 VmaAllocationRequest* pAllocationRequest)
// Nothing to do — trivially successful.
8684 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Only the wrap-around (ring-buffer) strategy produces items to make lost.
8689 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8691 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8692 size_t index1st = m_1stNullItemsBeginCount;
8693 size_t madeLostCount = 0;
8694 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8696 VMA_ASSERT(index1st < suballocations1st.size());
8697 VmaSuballocation& suballoc = suballocations1st[index1st];
8698 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8700 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8701 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8702 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the suballocation into a free (null) middle item.
8704 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8705 suballoc.hAllocation = VK_NULL_HANDLE;
8706 m_SumFreeSize += suballoc.size;
8707 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in this block that can become lost and is old
// enough (per currentFrameIndex/frameInUseCount). Returns how many were made
// lost. NOTE(review): the cleanup call performed when lostAllocationCount is
// nonzero is elided in this chunk.
8724 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8726 uint32_t lostAllocationCount = 0;
// Scan live items of the 1st vector.
8728 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8729 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8731 VmaSuballocation& suballoc = suballocations1st[i];
8732 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8733 suballoc.hAllocation->CanBecomeLost() &&
8734 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the entry into a free (null) middle item and reclaim its size.
8736 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8737 suballoc.hAllocation = VK_NULL_HANDLE;
8738 ++m_1stNullItemsMiddleCount;
8739 m_SumFreeSize += suballoc.size;
8740 ++lostAllocationCount;
// Scan the whole 2nd vector.
8744 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8745 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8747 VmaSuballocation& suballoc = suballocations2nd[i];
8748 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8749 suballoc.hAllocation->CanBecomeLost() &&
8750 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8752 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8753 suballoc.hAllocation = VK_NULL_HANDLE;
8754 ++m_2ndNullItemsCount;
8755 ++lostAllocationCount;
// NOTE(review): presumably triggers compaction/cleanup here — elided.
8759 if(lostAllocationCount)
8764 return lostAllocationCount;
// Validates the magic values written around every live allocation in the
// mapped block data. Returns VK_ERROR_VALIDATION_FAILED_EXT on the first
// corrupted margin. NOTE(review): the final success return is elided in this
// chunk.
8767 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
// Check margins around live items of the 1st vector.
8769 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8770 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8772 const VmaSuballocation& suballoc = suballocations1st[i];
8773 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Magic value occupies the VMA_DEBUG_MARGIN bytes before the allocation.
8775 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8777 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8778 return VK_ERROR_VALIDATION_FAILED_EXT;
// And another one immediately after the allocation's end.
8780 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8782 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8783 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the whole 2nd vector.
8788 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8789 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8791 const VmaSuballocation& suballoc = suballocations2nd[i];
8792 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8794 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8796 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8797 return VK_ERROR_VALIDATION_FAILED_EXT;
8799 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8801 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8802 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: records the new
// suballocation in the appropriate vector and updates the 2nd-vector mode.
// NOTE(review): the upper-address/strategy selection flag and some braces
// are elided in this chunk.
8810 void VmaBlockMetadata_Linear::Alloc(
8811 const VmaAllocationRequest& request,
8812 VmaSuballocationType type,
8813 VkDeviceSize allocSize,
8817 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-side allocation: push onto the 2nd vector as a descending stack.
8821 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
8822 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
8823 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8824 suballocations2nd.push_back(newSuballoc);
8825 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Lower-side allocation paths follow.
8829 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block.
8832 if(suballocations1st.empty())
8834 suballocations1st.push_back(newSuballoc);
// Appending past the end of the 1st vector.
8839 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
8842 VMA_ASSERT(request.offset + allocSize <= GetSize());
8843 suballocations1st.push_back(newSuballoc);
// Wrapping around before the start of the 1st vector: ring-buffer mode.
8846 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
8848 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8850 switch(m_2ndVectorMode)
// First wrap-around switches the 2nd vector into ring-buffer mode.
8852 case SECOND_VECTOR_EMPTY:
8854 VMA_ASSERT(suballocations2nd.empty());
8855 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
8857 case SECOND_VECTOR_RING_BUFFER:
8859 VMA_ASSERT(!suballocations2nd.empty());
// Ring-buffer use is mutually exclusive with double-stack use.
8861 case SECOND_VECTOR_DOUBLE_STACK:
8862 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
8868 suballocations2nd.push_back(newSuballoc);
// Request.offset matched none of the legal placements.
8872 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
8877 m_SumFreeSize -= newSuballoc.size;
8880 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
8882 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts at the given offset. Fast paths handle
// the first item of the 1st vector and the last item of either vector; the
// general case binary-searches the sorted vectors and marks the entry as a
// null (free) item. NOTE(review): braces and the cleanup/compaction calls
// that follow the fast paths are elided in this chunk.
8885 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
8887 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8888 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8890 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector just extends
// the null-item prefix.
8893 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8894 if(firstSuballoc.offset == offset)
8896 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8897 firstSuballoc.hAllocation = VK_NULL_HANDLE;
8898 m_SumFreeSize += firstSuballoc.size;
8899 ++m_1stNullItemsBeginCount;
// Fast path: last item of the 2nd vector (ring buffer or stack top) can be
// popped directly.
8906 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
8907 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8909 VmaSuballocation& lastSuballoc = suballocations2nd.back();
8910 if(lastSuballoc.offset == offset)
8912 m_SumFreeSize += lastSuballoc.size;
8913 suballocations2nd.pop_back();
// Fast path: last item of the 1st vector when there is no 2nd vector.
8919 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
8921 VmaSuballocation& lastSuballoc = suballocations1st.back();
8922 if(lastSuballoc.offset == offset)
8924 m_SumFreeSize += lastSuballoc.size;
8925 suballocations1st.pop_back();
// General case: binary search the 1st vector (sorted by ascending offset).
8933 VmaSuballocation refSuballoc;
8934 refSuballoc.offset = offset;
8936 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
8937 suballocations1st.begin() + m_1stNullItemsBeginCount,
8938 suballocations1st.end(),
8940 if(it != suballocations1st.end())
8942 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8943 it->hAllocation = VK_NULL_HANDLE;
8944 ++m_1stNullItemsMiddleCount;
8945 m_SumFreeSize += it->size;
// General case: search the 2nd vector; its sort order depends on the mode
// (ascending offsets for ring buffer, descending for double stack).
8951 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
8954 VmaSuballocation refSuballoc;
8955 refSuballoc.offset = offset;
8957 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
8958 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
8959 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
8960 if(it != suballocations2nd.end())
8962 it->type = VMA_SUBALLOCATION_TYPE_FREE;
8963 it->hAllocation = VK_NULL_HANDLE;
8964 ++m_2ndNullItemsCount;
8965 m_SumFreeSize += it->size;
// Reaching here means the offset did not match any live suballocation.
8971 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
8974 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 8976 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8977 const size_t suballocCount = AccessSuballocations1st().size();
8978 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after a free: resets an empty block, trims null items from the
// vector edges, compacts the 1st vector when mostly empty, and promotes the
// 2nd (ring-buffer) vector to become the new 1st vector when the old 1st is
// exhausted. NOTE(review): the IsEmpty() guard, some braces, and the
// ++srcIndex advance inside the compaction loop are elided in this chunk.
8981 void VmaBlockMetadata_Linear::CleanupAfterFree()
8983 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8984 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block became empty — reset all bookkeeping to the initial state.
8988 suballocations1st.clear();
8989 suballocations2nd.clear();
8990 m_1stNullItemsBeginCount = 0;
8991 m_1stNullItemsMiddleCount = 0;
8992 m_2ndNullItemsCount = 0;
8993 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
8997 const size_t suballoc1stCount = suballocations1st.size();
8998 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
8999 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the null-item prefix over middle items that became null.
9002 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9003 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9005 ++m_1stNullItemsBeginCount;
9006 --m_1stNullItemsMiddleCount;
// Pop null items from the tail of the 1st vector.
9010 while(m_1stNullItemsMiddleCount > 0 &&
9011 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9013 --m_1stNullItemsMiddleCount;
9014 suballocations1st.pop_back();
// Pop null items from the tail of the 2nd vector.
9018 while(m_2ndNullItemsCount > 0 &&
9019 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9021 --m_2ndNullItemsCount;
9022 suballocations2nd.pop_back();
// Compact the 1st vector in place when null items dominate (see
// ShouldCompact1st), shifting live items down to the front.
9025 if(ShouldCompact1st())
9027 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9028 size_t srcIndex = m_1stNullItemsBeginCount;
9029 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9031 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9035 if(dstIndex != srcIndex)
9037 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9041 suballocations1st.resize(nonNullItemCount);
9042 m_1stNullItemsBeginCount = 0;
9043 m_1stNullItemsMiddleCount = 0;
// 2nd vector fully drained — return to the EMPTY mode.
9047 if(suballocations2nd.empty())
9049 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left.
9053 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9055 suballocations1st.clear();
9056 m_1stNullItemsBeginCount = 0;
// Promote the ring-buffer 2nd vector to become the new 1st vector by
// swapping the vector roles (m_1stVectorIndex ^= 1) and migrating the
// null-item counters accordingly.
9058 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9061 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9062 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9063 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9064 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9066 ++m_1stNullItemsBeginCount;
9067 --m_1stNullItemsMiddleCount;
9069 m_2ndNullItemsCount = 0;
9070 m_1stVectorIndex ^= 1;
9075 VMA_HEAVY_ASSERT(Validate());
// Constructs an uninitialized block: real state is set later by Init().
// NOTE(review): some member initializers (e.g. a map counter) appear to be
// elided in this chunk.
9082 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9083 m_pMetadata(VMA_NULL),
9084 m_MemoryTypeIndex(UINT32_MAX),
9086 m_hMemory(VK_NULL_HANDLE),
9088 m_pMappedData(VMA_NULL)
// Takes ownership of an already-allocated VkDeviceMemory and creates the
// metadata object that manages suballocations within it — linear or generic
// depending on linearAlgorithm. Must be called exactly once on a fresh block.
// NOTE(review): some parameters and the if/else around the two vma_new calls
// are elided in this chunk.
9092 void VmaDeviceMemoryBlock::Init(
9094 uint32_t newMemoryTypeIndex,
9095 VkDeviceMemory newMemory,
9096 VkDeviceSize newSize,
9098 bool linearAlgorithm)
// Guard against double initialization.
9100 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9102 m_MemoryTypeIndex = newMemoryTypeIndex;
9104 m_hMemory = newMemory;
// Linear (stack/ring-buffer) metadata for linear pools...
9108 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
// ...otherwise the generic free-list metadata.
9112 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9114 m_pMetadata->Init(newSize);
9117 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9121 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9123 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9124 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9125 m_hMemory = VK_NULL_HANDLE;
9127 vma_delete(allocator, m_pMetadata);
9128 m_pMetadata = VMA_NULL;
9131 bool VmaDeviceMemoryBlock::Validate()
const 9133 if((m_hMemory == VK_NULL_HANDLE) ||
9134 (m_pMetadata->GetSize() == 0))
9139 return m_pMetadata->Validate();
9142 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9144 void* pData =
nullptr;
9145 VkResult res = Map(hAllocator, 1, &pData);
9146 if(res != VK_SUCCESS)
9151 res = m_pMetadata->CheckCorruption(pData);
9153 Unmap(hAllocator, 1);
9158 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9165 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9168 m_MapCount += count;
9169 VMA_ASSERT(m_pMappedData != VMA_NULL);
9170 if(ppData != VMA_NULL)
9172 *ppData = m_pMappedData;
9178 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9179 hAllocator->m_hDevice,
9185 if(result == VK_SUCCESS)
9187 if(ppData != VMA_NULL)
9189 *ppData = m_pMappedData;
9197 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9204 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9205 if(m_MapCount >= count)
9207 m_MapCount -= count;
9210 m_pMappedData = VMA_NULL;
9211 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
9216 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
9220 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9222 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9223 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9226 VkResult res = Map(hAllocator, 1, &pData);
9227 if(res != VK_SUCCESS)
9232 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
9233 VmaWriteMagicValue(pData, allocOffset + allocSize);
9235 Unmap(hAllocator, 1);
9240 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9242 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
9243 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
9246 VkResult res = Map(hAllocator, 1, &pData);
9247 if(res != VK_SUCCESS)
9252 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
9254 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
9256 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
9258 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
9261 Unmap(hAllocator, 1);
9266 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
9271 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9272 hAllocation->GetBlock() ==
this);
9274 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9275 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
9276 hAllocator->m_hDevice,
9279 hAllocation->GetOffset());
9282 VkResult VmaDeviceMemoryBlock::BindImageMemory(
9287 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
9288 hAllocation->GetBlock() ==
this);
9290 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9291 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
9292 hAllocator->m_hDevice,
9295 hAllocation->GetOffset());
// NOTE(review): fragments of three definitions, heavily truncated by
// extraction. The memset line belongs to a stat-init helper whose signature
// (original line ~9298) is missing; VmaPostprocessCalcStatInfo's body is
// missing entirely; VmaPool_T's constructor initializer list is visible only
// as the arguments it forwards (presumably to its m_BlockVector member —
// TODO confirm against upstream).
9300 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing step for an accumulated VmaStatInfo (body not visible here).
9319 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T ctor: forwards pool-creation parameters; blockSize==0 selects the
// allocator-computed preferredBlockSize and marks the size as non-explicit
// (note createInfo.blockSize != 0 passed as a flag below).
9327 VmaPool_T::VmaPool_T(
9330 VkDeviceSize preferredBlockSize) :
9333 createInfo.memoryTypeIndex,
9334 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
9335 createInfo.minBlockCount,
9336 createInfo.maxBlockCount,
9338 createInfo.frameInUseCount,
9340 createInfo.blockSize != 0,
// Destructor (body lines missing in this extraction).
9346 VmaPool_T::~VmaPool_T()
// VmaBlockVector ctor: stores configuration for a vector of device-memory
// blocks of one memory type (preferred block size, min/max block count,
// granularity, frame-in-use count, custom-pool / explicit-size / linear
// flags) and creates the empty block list using the allocator's callbacks.
// NOTE(review): the first visible line also carries a stray
// #if/#endif VMA_STATS_STRING_ENABLED pair fused in by extraction, and the
// hAllocator/isCustomPool parameter lines are missing from the signature.
9350 #if VMA_STATS_STRING_ENABLED 9352 #endif // #if VMA_STATS_STRING_ENABLED 9354 VmaBlockVector::VmaBlockVector(
9356 uint32_t memoryTypeIndex,
9357 VkDeviceSize preferredBlockSize,
9358 size_t minBlockCount,
9359 size_t maxBlockCount,
9360 VkDeviceSize bufferImageGranularity,
9361 uint32_t frameInUseCount,
9363 bool explicitBlockSize,
9364 bool linearAlgorithm) :
9365 m_hAllocator(hAllocator),
9366 m_MemoryTypeIndex(memoryTypeIndex),
9367 m_PreferredBlockSize(preferredBlockSize),
9368 m_MinBlockCount(minBlockCount),
9369 m_MaxBlockCount(maxBlockCount),
9370 m_BufferImageGranularity(bufferImageGranularity),
9371 m_FrameInUseCount(frameInUseCount),
9372 m_IsCustomPool(isCustomPool),
9373 m_ExplicitBlockSize(explicitBlockSize),
9374 m_LinearAlgorithm(linearAlgorithm),
9375 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
9376 m_HasEmptyBlock(false),
9377 m_pDefragmentator(VMA_NULL),
// Destructor: requires the defragmentator to be gone, then destroys and
// deletes every remaining block (reverse iteration via `size(); i--;`).
9382 VmaBlockVector::~VmaBlockVector()
9384 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
9386 for(
size_t i = m_Blocks.size(); i--; )
9388 m_Blocks[i]->Destroy(m_hAllocator);
9389 vma_delete(m_hAllocator, m_Blocks[i]);
// CreateMinBlocks: pre-creates m_MinBlockCount blocks of the preferred size,
// bailing out on the first failure (early-return lines missing here).
9393 VkResult VmaBlockVector::CreateMinBlocks()
9395 for(
size_t i = 0; i < m_MinBlockCount; ++i)
9397 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
9398 if(res != VK_SUCCESS)
// GetPoolStats: under the vector mutex, accumulates per-block metadata stats
// into *pStats.
9406 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
9408 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9410 const size_t blockCount = m_Blocks.size();
9419 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
9421 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
9423 VMA_HEAVY_ASSERT(pBlock->Validate());
9424 pBlock->m_pMetadata->AddPoolStats(*pStats);
// IsCorruptionDetectionEnabled: corruption checks need the debug macros on
// AND host-visible|host-coherent memory (the block must be CPU-mappable).
9428 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 9430 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
9431 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
9432 (VMA_DEBUG_MARGIN > 0) &&
9433 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on make-lost retry attempts in Allocate() below.
9436 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocate: the core block-vector allocation strategy. Visible order of
// attempts: (1) last block when using the linear algorithm, (2) every
// existing block, (3) a newly created block — with the new block's size
// halved up to NEW_BLOCK_SIZE_SHIFT_MAX times, both heuristically (before
// creation, when existing blocks are small) and as a retry when
// vkAllocateMemory fails — and (4) if allowed, making existing allocations
// "lost" to free up space, retried up to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): many interior lines (parameters, braces, early returns) were
// dropped by extraction; comments below describe only the surviving lines.
9438 VkResult VmaBlockVector::Allocate(
9440 uint32_t currentFrameIndex,
9442 VkDeviceSize alignment,
9444 VmaSuballocationType suballocType,
9451 const bool canCreateNewBlock =
9453 (m_Blocks.size() < m_MaxBlockCount);
// Linear algorithm across multiple blocks cannot support make-lost.
9457 if(m_LinearAlgorithm && m_MaxBlockCount > 1)
9459 canMakeOtherLost =
false;
// Upper-address allocation only makes sense for a single linear block.
9463 if(isUpperAddress &&
9464 (!m_LinearAlgorithm || m_MaxBlockCount > 1))
9466 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (including both debug margins) can never fit in one block.
9470 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
9472 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
9475 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9482 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Linear: only the most recently added block is a candidate.
9488 if(m_LinearAlgorithm)
9491 if(!m_Blocks.empty())
9493 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
9494 VMA_ASSERT(pCurrBlock);
9495 VkResult res = AllocateFromBlock(
9505 if(res == VK_SUCCESS)
9507 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 2. Generic: try every existing block in order.
9515 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
9517 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
9518 VMA_ASSERT(pCurrBlock);
9519 VkResult res = AllocateFromBlock(
9529 if(res == VK_SUCCESS)
9531 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 3. Create a new block, shrinking the candidate size when appropriate.
9538 if(canCreateNewBlock)
9541 VkDeviceSize newBlockSize = m_PreferredBlockSize;
9542 uint32_t newBlockSizeShift = 0;
9543 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Pre-shrink heuristic: only when block size wasn't explicitly fixed —
// halve while still larger than any existing block and >= 2x the request.
9545 if(!m_ExplicitBlockSize)
9548 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
9549 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
9551 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
9552 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
9554 newBlockSize = smallerNewBlockSize;
9555 ++newBlockSizeShift;
9564 size_t newBlockIndex = 0;
9565 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On vkAllocateMemory failure, retry with progressively halved sizes
// (still >= the requested size), sharing the same shift budget.
9567 if(!m_ExplicitBlockSize)
9569 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
9571 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
9572 if(smallerNewBlockSize >= size)
9574 newBlockSize = smallerNewBlockSize;
9575 ++newBlockSizeShift;
9576 res = CreateBlock(newBlockSize, &newBlockIndex);
9585 if(res == VK_SUCCESS)
9587 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
9588 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
9590 res = AllocateFromBlock(
9600 if(res == VK_SUCCESS)
9602 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation from the freshly created block failed — give up here.
9608 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 4. Make-lost path: repeatedly pick the cheapest request that evicts
// lost-able allocations, bounded by VMA_ALLOCATION_TRY_COUNT.
9615 if(canMakeOtherLost)
9617 uint32_t tryIndex = 0;
9618 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
9620 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
9621 VmaAllocationRequest bestRequest = {};
9622 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Scan all blocks for the lowest-cost feasible request.
9626 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
9628 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
9629 VMA_ASSERT(pCurrBlock);
9630 VmaAllocationRequest currRequest = {};
9631 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
9634 m_BufferImageGranularity,
9642 const VkDeviceSize currRequestCost = currRequest.CalcCost();
9643 if(pBestRequestBlock == VMA_NULL ||
9644 currRequestCost < bestRequestCost)
9646 pBestRequestBlock = pCurrBlock;
9647 bestRequest = currRequest;
9648 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — can't do better.
9650 if(bestRequestCost == 0)
9658 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations require the block mapped up front.
9662 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
9663 if(res != VK_SUCCESS)
9669 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
9675 if(pBestRequestBlock->m_pMetadata->IsEmpty())
9677 m_HasEmptyBlock =
false;
// Commit: create the allocation object and register it in the metadata.
9680 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9681 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
9682 (*pAllocation)->InitBlockAllocation(
9691 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
9692 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
9693 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
9694 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9696 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9698 if(IsCorruptionDetectionEnabled())
9700 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
9701 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted: other threads kept stealing the space.
9716 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
9718 return VK_ERROR_TOO_MANY_OBJECTS;
9722 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Free: returns an allocation to its block's metadata. Optionally validates
// debug guard values first, drops the persistent mapping reference, and keeps
// at most ONE empty block alive (above m_MinBlockCount) — the block chosen
// for deletion is destroyed OUTSIDE the mutex, via pBlockToDelete.
9725 void VmaBlockVector::Free(
9728 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the mutex lock; actual VkDeviceMemory free happens after it.
9732 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9734 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9736 if(IsCorruptionDetectionEnabled())
9738 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
9739 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
9742 if(hAllocation->IsPersistentMap())
9744 pBlock->Unmap(m_hAllocator, 1);
9747 pBlock->m_pMetadata->Free(hAllocation);
9748 VMA_HEAVY_ASSERT(pBlock->Validate());
9750 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: delete it only if we already keep another empty
// block and we're above the minimum count; otherwise remember it as the
// one retained empty block.
9753 if(pBlock->m_pMetadata->IsEmpty())
9756 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
9758 pBlockToDelete = pBlock;
9764 m_HasEmptyBlock =
true;
// Block not empty but another empty block exists: if the last (sorted)
// block is empty and deletable, drop it instead.
9769 else if(m_HasEmptyBlock)
9771 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
9772 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
9774 pBlockToDelete = pLastBlock;
9775 m_Blocks.pop_back();
9776 m_HasEmptyBlock =
false;
9780 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the lock.
9785 if(pBlockToDelete != VMA_NULL)
9787 VMA_DEBUG_LOG(
" Deleted empty allocation");
9788 pBlockToDelete->Destroy(m_hAllocator);
9789 vma_delete(m_hAllocator, pBlockToDelete);
// CalcMaxBlockSize: largest existing block size, scanning backwards and
// short-circuiting once the preferred size is reached.
9793 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 9795 VkDeviceSize result = 0;
9796 for(
size_t i = m_Blocks.size(); i--; )
9798 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
9799 if(result >= m_PreferredBlockSize)
// Remove: linear search for pBlock and erase it from m_Blocks.
9807 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
9809 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
9811 if(m_Blocks[blockIndex] == pBlock)
9813 VmaVectorRemove(m_Blocks, blockIndex);
// IncrementallySortBlocks: one bubble-sort pass keeping blocks ordered by
// ascending free space (skipped for the linear algorithm, which relies on
// block insertion order).
9820 void VmaBlockVector::IncrementallySortBlocks()
9822 if(!m_LinearAlgorithm)
9825 for(
size_t i = 1; i < m_Blocks.size(); ++i)
9827 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
9829 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i])

;
// AllocateFromBlock: tries to place one allocation in the given block —
// asks the metadata for a request that requires making nothing lost, maps
// the block if persistently mapped, commits the allocation, then applies
// the debug fill pattern and guard values. Returns
// VK_ERROR_OUT_OF_DEVICE_MEMORY when the block has no suitable space.
9836 VkResult VmaBlockVector::AllocateFromBlock(
9837 VmaDeviceMemoryBlock* pBlock,
9839 uint32_t currentFrameIndex,
9841 VkDeviceSize alignment,
9844 VmaSuballocationType suballocType,
9852 VmaAllocationRequest currRequest = {};
9853 if(pBlock->m_pMetadata->CreateAllocationRequest(
9856 m_BufferImageGranularity,
// This path forbids evicting others — the request must be free-space only.
9865 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
9869 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
9870 if(res != VK_SUCCESS)
9877 if(pBlock->m_pMetadata->IsEmpty())
9879 m_HasEmptyBlock =
false;
9882 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
9883 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
9884 (*pAllocation)->InitBlockAllocation(
9893 VMA_HEAVY_ASSERT(pBlock->Validate());
9894 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
9895 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
9897 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
9899 if(IsCorruptionDetectionEnabled())
9901 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
9902 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
9906 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// CreateBlock: allocates VkDeviceMemory of blockSize, wraps it in a new
// VmaDeviceMemoryBlock, appends it to m_Blocks and optionally reports the
// index. NOTE(review): the error-return after AllocateVulkanMemory and the
// pBlock->Init(...) argument lines were dropped by extraction.
9909 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
9911 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
9912 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
9913 allocInfo.allocationSize = blockSize;
9914 VkDeviceMemory mem = VK_NULL_HANDLE;
9915 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
9924 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
9929 allocInfo.allocationSize,
9933 m_Blocks.push_back(pBlock);
9934 if(pNewBlockIndex != VMA_NULL)
9936 *pNewBlockIndex = m_Blocks.size() - 1;
// PrintDetailedMap (stats-string build only): serializes this block vector as
// JSON. Visible structure: for a custom pool — MemoryTypeIndex, BlockSize,
// a BlockCount object (Min/Max/Cur), optional FrameInUseCount and
// LinearAlgorithm; for a default pool — PreferredBlockSize. Then a "Blocks"
// section with one entry per block, keyed by block id. The if(m_IsCustomPool)
// branch lines themselves were dropped by extraction.
9942 #if VMA_STATS_STRING_ENABLED 9944 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
9946 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
9952 json.WriteString(
"MemoryTypeIndex");
9953 json.WriteNumber(m_MemoryTypeIndex);
9955 json.WriteString(
"BlockSize");
9956 json.WriteNumber(m_PreferredBlockSize);
9958 json.WriteString(
"BlockCount");
9959 json.BeginObject(
true);
9960 if(m_MinBlockCount > 0)
9962 json.WriteString(
"Min");
9963 json.WriteNumber((uint64_t)m_MinBlockCount);
9965 if(m_MaxBlockCount < SIZE_MAX)
9967 json.WriteString(
"Max");
9968 json.WriteNumber((uint64_t)m_MaxBlockCount);
9970 json.WriteString(
"Cur");
9971 json.WriteNumber((uint64_t)m_Blocks.size());
9974 if(m_FrameInUseCount > 0)
9976 json.WriteString(
"FrameInUseCount");
9977 json.WriteNumber(m_FrameInUseCount);
9980 if(m_LinearAlgorithm)
9982 json.WriteString(
"LinearAlgorithm");
9983 json.WriteBool(
true);
9988 json.WriteString(
"PreferredBlockSize");
9989 json.WriteNumber(m_PreferredBlockSize);
9992 json.WriteString(
"Blocks");
9994 for(
size_t i = 0; i < m_Blocks.size(); ++i)
9997 json.ContinueString(m_Blocks[i]->GetId());
10000 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
10007 #endif // #if VMA_STATS_STRING_ENABLED 10009 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
10011 uint32_t currentFrameIndex)
10013 if(m_pDefragmentator == VMA_NULL)
10015 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
10018 currentFrameIndex);
10021 return m_pDefragmentator;
10024 VkResult VmaBlockVector::Defragment(
10026 VkDeviceSize& maxBytesToMove,
10027 uint32_t& maxAllocationsToMove)
10029 if(m_pDefragmentator == VMA_NULL)
10034 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10037 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
10040 if(pDefragmentationStats != VMA_NULL)
10042 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
10043 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
10046 VMA_ASSERT(bytesMoved <= maxBytesToMove);
10047 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
10053 m_HasEmptyBlock =
false;
10054 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10056 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
10057 if(pBlock->m_pMetadata->IsEmpty())
10059 if(m_Blocks.size() > m_MinBlockCount)
10061 if(pDefragmentationStats != VMA_NULL)
10064 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
10067 VmaVectorRemove(m_Blocks, blockIndex);
10068 pBlock->Destroy(m_hAllocator);
10069 vma_delete(m_hAllocator, pBlock);
10073 m_HasEmptyBlock =
true;
10081 void VmaBlockVector::DestroyDefragmentator()
10083 if(m_pDefragmentator != VMA_NULL)
10085 vma_delete(m_hAllocator, m_pDefragmentator);
10086 m_pDefragmentator = VMA_NULL;
10090 void VmaBlockVector::MakePoolAllocationsLost(
10091 uint32_t currentFrameIndex,
10092 size_t* pLostAllocationCount)
10094 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10095 size_t lostAllocationCount = 0;
10096 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10098 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10099 VMA_ASSERT(pBlock);
10100 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
10102 if(pLostAllocationCount != VMA_NULL)
10104 *pLostAllocationCount = lostAllocationCount;
10108 VkResult VmaBlockVector::CheckCorruption()
10110 if(!IsCorruptionDetectionEnabled())
10112 return VK_ERROR_FEATURE_NOT_PRESENT;
10115 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10116 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10118 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10119 VMA_ASSERT(pBlock);
10120 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10121 if(res != VK_SUCCESS)
10129 void VmaBlockVector::AddStats(
VmaStats* pStats)
10131 const uint32_t memTypeIndex = m_MemoryTypeIndex;
10132 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
10134 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10136 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10138 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10139 VMA_ASSERT(pBlock);
10140 VMA_HEAVY_ASSERT(pBlock->Validate());
10142 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
10143 VmaAddStatInfo(pStats->
total, allocationStatInfo);
10144 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
10145 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// VmaDefragmentator ctor: binds to one block vector; defragmentation is not
// supported for the linear algorithm (asserted).
10152 VmaDefragmentator::VmaDefragmentator(
10154 VmaBlockVector* pBlockVector,
10155 uint32_t currentFrameIndex) :
10156 m_hAllocator(hAllocator),
10157 m_pBlockVector(pBlockVector),
10158 m_CurrentFrameIndex(currentFrameIndex),
10160 m_AllocationsMoved(0),
10161 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
10162 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
10164 VMA_ASSERT(!pBlockVector->UsesLinearAlgorithm());
// Destructor: frees the per-block BlockInfo objects.
10167 VmaDefragmentator::~VmaDefragmentator()
10169 for(
size_t i = m_Blocks.size(); i--; )
10171 vma_delete(m_hAllocator, m_Blocks[i]);
// AddAllocation: queues one allocation (with its caller-visible "changed"
// flag) for the next Defragment() pass.
10175 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
10177 AllocationInfo allocInfo;
10178 allocInfo.m_hAllocation = hAlloc;
10179 allocInfo.m_pChanged = pChanged;
10180 m_Allocations.push_back(allocInfo);
// BlockInfo::EnsureMapping: returns an existing defrag mapping or the block's
// persistent mapping if present; otherwise maps the block once and caches the
// pointer for Unmap() to release.
10183 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
10186 if(m_pMappedDataForDefragmentation)
10188 *ppMappedData = m_pMappedDataForDefragmentation;
10193 if(m_pBlock->GetMappedData())
10195 *ppMappedData = m_pBlock->GetMappedData();
10200 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
10201 *ppMappedData = m_pMappedDataForDefragmentation;
// BlockInfo::Unmap: releases only a mapping created by EnsureMapping (never
// the block's pre-existing persistent mapping).
10205 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
10207 if(m_pMappedDataForDefragmentation != VMA_NULL)
10209 m_pBlock->Unmap(hAllocator, 1);
// DefragmentRound: one pass of the move loop. Walks source allocations from
// the LAST block backwards and, for each, tries every block at a lower (or
// equal) index as a destination; when a placement both fits and passes
// MoveMakesSense(), the allocation's bytes are memcpy'd between mapped
// blocks, metadata is updated, and the allocation is rebound to the new
// block/offset. Stops with VK_INCOMPLETE when the byte/count budget is hit.
// NOTE(review): extraction dropped the outer loop header and several
// braces/returns; comments describe only the surviving lines.
10213 VkResult VmaDefragmentator::DefragmentRound(
10214 VkDeviceSize maxBytesToMove,
10215 uint32_t maxAllocationsToMove)
10217 if(m_Blocks.empty())
// Cursor starts at the last allocation of the last block.
10222 size_t srcBlockIndex = m_Blocks.size() - 1;
10223 size_t srcAllocIndex = SIZE_MAX;
// Skip empty source blocks, moving the cursor toward block 0; SIZE_MAX is
// the "pick last allocation of current block" sentinel.
10229 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
10231 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
10234 if(srcBlockIndex == 0)
10241 srcAllocIndex = SIZE_MAX;
10246 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
10250 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
10251 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
10253 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
10254 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
10255 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
10256 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from index 0 up to the source block itself.
10259 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
10261 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
10262 VmaAllocationRequest dstAllocRequest;
10263 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
10264 m_CurrentFrameIndex,
10265 m_pBlockVector->GetFrameInUseCount(),
10266 m_pBlockVector->GetBufferImageGranularity(),
10272 &dstAllocRequest) &&
10274 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
10276 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
10279 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
10280 (m_BytesMoved + size > maxBytesToMove))
10282 return VK_INCOMPLETE;
10285 void* pDstMappedData = VMA_NULL;
10286 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
10287 if(res != VK_SUCCESS)
10292 void* pSrcMappedData = VMA_NULL;
10293 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
10294 if(res != VK_SUCCESS)
// Byte copy between the two mapped blocks (memcpy call line was
// dropped by extraction; these are its arguments).
10301 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
10302 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
10303 static_cast<size_t>(size));
// Re-write the guard values around the new location in debug builds.
10305 if(VMA_DEBUG_MARGIN > 0)
10307 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
10308 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate at destination, free at source, rebind handle.
10311 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
10316 allocInfo.m_hAllocation);
10317 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
10319 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
10321 if(allocInfo.m_pChanged != VMA_NULL)
10323 *allocInfo.m_pChanged = VK_TRUE;
10326 ++m_AllocationsMoved;
10327 m_BytesMoved += size;
10329 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor (decrement within block, else previous block).
10337 if(srcAllocIndex > 0)
10343 if(srcBlockIndex > 0)
10346 srcAllocIndex = SIZE_MAX;
// Defragment: driver for the whole pass. Builds a BlockInfo per block, sorts
// them by block pointer to allow binary search, distributes the queued
// allocations to their owning blocks (skipping already-lost ones), computes
// per-block movability and sorts allocations by descending size, re-sorts
// blocks by move-destination preference, then executes up to two
// DefragmentRound passes and unmaps everything.
10356 VkResult VmaDefragmentator::Defragment(
10357 VkDeviceSize maxBytesToMove,
10358 uint32_t maxAllocationsToMove)
10360 if(m_Allocations.empty())
// Mirror the block vector into local BlockInfo records.
10366 const size_t blockCount = m_pBlockVector->m_Blocks.size();
10367 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10369 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
10370 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
10371 m_Blocks.push_back(pBlockInfo);
10375 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Route each queued allocation to the BlockInfo of the block that owns it.
// NOTE(review): the loop variable is named blockIndex but iterates
// m_Allocations — confusing but preserved as-is.
10378 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
10380 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are silently dropped from the pass.
10382 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
10384 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
10385 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
10386 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
10388 (*it)->m_Allocations.push_back(allocInfo);
10396 m_Allocations.clear();
10398 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10400 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
10401 pBlockInfo->CalcHasNonMovableAllocations();
10402 pBlockInfo->SortAllocationsBySizeDescecnding();
10406 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds, stopping early on VK_INCOMPLETE or error.
10409 VkResult result = VK_SUCCESS;
10410 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
10412 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings EnsureMapping created during the rounds.
10416 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10418 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// MoveMakesSense: a move improves packing only if it goes to an
// earlier block, or within the same block to a lower offset.
10424 bool VmaDefragmentator::MoveMakesSense(
10425 size_t dstBlockIndex, VkDeviceSize dstOffset,
10426 size_t srcBlockIndex, VkDeviceSize srcOffset)
10428 if(dstBlockIndex < srcBlockIndex)
10432 if(dstBlockIndex > srcBlockIndex)
10436 if(dstOffset < srcOffset)
// ============================================================================
// VmaRecorder (compiled only when VMA_RECORDING_ENABLED): writes a CSV log of
// allocator calls to a file for offline replay. Every Record* method follows
// the same pattern: gather CallParams (thread id + timestamp), lock the file
// mutex, fprintf one CSV line "threadId,time,frameIndex,functionName,args...".
// NOTE(review): this whole region is corrupted by extraction — original line
// numbers are fused into the text and many argument lines of the fprintf
// calls are missing. The QueryPerformanceCounter/fopen_s/sprintf_s calls show
// this implementation is Windows-specific.
// ============================================================================

// Ctor + Init fragments: ctor leaves the start counter at a sentinel; Init
// (header line missing) stores flags, initializes the high-resolution timer,
// and opens the capture file, then writes the two-line CSV header
// (file format version "1,3").
10446 #if VMA_RECORDING_ENABLED 10448 VmaRecorder::VmaRecorder() :
10453 m_StartCounter(INT64_MAX)
10459 m_UseMutex = useMutex;
10460 m_Flags = settings.
flags;
10462 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
10463 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
10466 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
10469 return VK_ERROR_INITIALIZATION_FAILED;
10473 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
10474 fprintf(m_File,
"%s\n",
"1,3");
// Dtor: closes the capture file if it was opened.
10479 VmaRecorder::~VmaRecorder()
10481 if(m_File != VMA_NULL)
// --- Simple no-argument events ---
10487 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
10489 CallParams callParams;
10490 GetBasicParams(callParams);
10492 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10493 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
10497 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
10499 CallParams callParams;
10500 GetBasicParams(callParams);
10502 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10503 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// --- Pool lifetime (RecordCreatePool's signature line was dropped) ---
10509 CallParams callParams;
10510 GetBasicParams(callParams);
10512 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10513 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
10524 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
10526 CallParams callParams;
10527 GetBasicParams(callParams);
10529 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10530 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// --- Allocation events: also serialize VkMemoryRequirements, the
// VmaAllocationCreateInfo (via UserDataString for pUserData) and, for the
// ForBuffer/ForImage variants, the dedicated-allocation hints ---
10535 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
10536 const VkMemoryRequirements& vkMemReq,
10540 CallParams callParams;
10541 GetBasicParams(callParams);
10543 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10544 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10545 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10547 vkMemReq.alignment,
10548 vkMemReq.memoryTypeBits,
10556 userDataStr.GetString());
10560 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
10561 const VkMemoryRequirements& vkMemReq,
10562 bool requiresDedicatedAllocation,
10563 bool prefersDedicatedAllocation,
10567 CallParams callParams;
10568 GetBasicParams(callParams);
10570 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10571 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10572 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10574 vkMemReq.alignment,
10575 vkMemReq.memoryTypeBits,
10576 requiresDedicatedAllocation ? 1 : 0,
10577 prefersDedicatedAllocation ? 1 : 0,
10585 userDataStr.GetString());
10589 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
10590 const VkMemoryRequirements& vkMemReq,
10591 bool requiresDedicatedAllocation,
10592 bool prefersDedicatedAllocation,
10596 CallParams callParams;
10597 GetBasicParams(callParams);
10599 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10600 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
10601 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10603 vkMemReq.alignment,
10604 vkMemReq.memoryTypeBits,
10605 requiresDedicatedAllocation ? 1 : 0,
10606 prefersDedicatedAllocation ? 1 : 0,
10614 userDataStr.GetString());
// --- Per-allocation events (handle-only or handle+data) ---
10618 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
10621 CallParams callParams;
10622 GetBasicParams(callParams);
10624 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10625 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
10630 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
10632 const void* pUserData)
10634 CallParams callParams;
10635 GetBasicParams(callParams);
10637 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10638 UserDataString userDataStr(
10641 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10643 userDataStr.GetString());
10647 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
10650 CallParams callParams;
10651 GetBasicParams(callParams);
10653 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10654 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
10659 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
10662 CallParams callParams;
10663 GetBasicParams(callParams);
10665 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10666 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
10671 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
10674 CallParams callParams;
10675 GetBasicParams(callParams);
10677 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10678 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
10683 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
10684 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
10686 CallParams callParams;
10687 GetBasicParams(callParams);
10689 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10690 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
10697 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
10698 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
10700 CallParams callParams;
10701 GetBasicParams(callParams);
10703 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10704 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// --- Buffer/image creation: also dumps the Vk*CreateInfo fields ---
10711 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
10712 const VkBufferCreateInfo& bufCreateInfo,
10716 CallParams callParams;
10717 GetBasicParams(callParams);
10719 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10720 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
10721 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10722 bufCreateInfo.flags,
10723 bufCreateInfo.size,
10724 bufCreateInfo.usage,
10725 bufCreateInfo.sharingMode,
10726 allocCreateInfo.
flags,
10727 allocCreateInfo.
usage,
10731 allocCreateInfo.
pool,
10733 userDataStr.GetString());
10737 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
10738 const VkImageCreateInfo& imageCreateInfo,
10742 CallParams callParams;
10743 GetBasicParams(callParams);
10745 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10746 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
10747 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
10748 imageCreateInfo.flags,
10749 imageCreateInfo.imageType,
10750 imageCreateInfo.format,
10751 imageCreateInfo.extent.width,
10752 imageCreateInfo.extent.height,
10753 imageCreateInfo.extent.depth,
10754 imageCreateInfo.mipLevels,
10755 imageCreateInfo.arrayLayers,
10756 imageCreateInfo.samples,
10757 imageCreateInfo.tiling,
10758 imageCreateInfo.usage,
10759 imageCreateInfo.sharingMode,
10760 imageCreateInfo.initialLayout,
10761 allocCreateInfo.
flags,
10762 allocCreateInfo.
usage,
10766 allocCreateInfo.
pool,
10768 userDataStr.GetString());
// --- Remaining handle-only events ---
10772 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
10775 CallParams callParams;
10776 GetBasicParams(callParams);
10778 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10779 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
10784 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
10787 CallParams callParams;
10788 GetBasicParams(callParams);
10790 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10791 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
10796 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
10799 CallParams callParams;
10800 GetBasicParams(callParams);
10802 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10803 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
10808 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
10811 CallParams callParams;
10812 GetBasicParams(callParams);
10814 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10815 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
10820 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
10823 CallParams callParams;
10824 GetBasicParams(callParams);
10826 VmaMutexLock lock(m_FileMutex, m_UseMutex);
10827 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// UserDataString ctor fragment (header line missing): either treats
// pUserData as a C string or formats the raw pointer with sprintf_s.
10834 if(pUserData != VMA_NULL)
10838 m_Str = (
const char*)pUserData;
10842 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes a one-time "Config,Begin" ... "Config,End" section to the recording
// file describing the environment the trace was captured in: physical-device
// identity, selected device limits, memory heap/type layout, whether the
// VK_KHR_dedicated_allocation extension was enabled, and the values of the
// VMA_DEBUG_* / block-size macros the allocator was compiled with. A replay
// tool can use this to detect incompatible playback environments.
10852 void VmaRecorder::WriteConfiguration(
10853 const VkPhysicalDeviceProperties& devProps,
10854 const VkPhysicalDeviceMemoryProperties& memProps,
10855 bool dedicatedAllocationExtensionEnabled)
10857 fprintf(m_File,
"Config,Begin\n");
// Physical device identity.
10859 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
10860 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
10861 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
10862 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
10863 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
10864 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that influence allocator behavior.
10866 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
10867 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
10868 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heap and memory type layout.
10870 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
10871 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
10873 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
10874 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
10876 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
10877 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
10879 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
10880 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
10883 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration macros of this VMA build.
10885 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
10886 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
10887 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
10888 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
10889 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
10890 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
10891 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
10892 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
10893 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
10895 fprintf(m_File,
"Config,End\n");
// Fills common per-call parameters: the calling thread id and the time in
// seconds since recorder start, derived from the high-resolution performance
// counter. GetCurrentThreadId/QueryPerformanceCounter are Win32 APIs, so this
// path is Windows-specific.
10898 void VmaRecorder::GetBasicParams(CallParams& outParams)
10900 outParams.threadId = GetCurrentThreadId();
10902 LARGE_INTEGER counter;
10903 QueryPerformanceCounter(&counter);
// m_Freq is the counter frequency captured at recorder init; dividing by it
// converts ticks to seconds as a double.
10904 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flushes buffered recording output to the file. NOTE(review): the body is
// missing from this extraction — presumably a conditional fflush(m_File);
// confirm against the original source.
10907 void VmaRecorder::Flush()
// VmaAllocator_T constructor (signature and the start of the initializer list
// are missing from this extraction). Caches device handles and allocation
// callbacks, zero-initializes internal tables, queries physical-device and
// memory properties, applies optional per-heap size limits, creates one block
// vector and one dedicated-allocation list per memory type, and optionally
// starts the call recorder.
10915 #endif // #if VMA_RECORDING_ENABLED 10923 m_hDevice(pCreateInfo->device),
10924 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
10925 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
10926 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
10927 m_PreferredLargeHeapBlockSize(0),
10928 m_PhysicalDevice(pCreateInfo->physicalDevice),
10929 m_CurrentFrameIndex(0),
10930 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
10933 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values into the margins, so the
// margin must be a multiple of 4 bytes.
10936 if(VMA_DEBUG_DETECT_CORRUPTION)
10939 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
10944 #if !(VMA_DEDICATED_ALLOCATION) 10947 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
10951 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
10952 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
10953 memset(&m_MemProps, 0,
sizeof(m_MemProps));
10955 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
10956 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no user-imposed limit" for a heap.
10958 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
10960 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
10971 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
10972 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-provided heap size limits; a limit also caps the reported heap
// size so CalcPreferredBlockSize sees the clamped value.
10979 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
10981 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
10982 if(limit != VK_WHOLE_SIZE)
10984 m_HeapSizeLimit[heapIndex] = limit;
10985 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
10987 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and one dedicated-allocation list per memory type.
10993 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
10995 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
10997 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11000 preferredBlockSize,
11003 GetBufferImageGranularity(),
11010 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
11017 VkResult res = VK_SUCCESS;
// Optional call recording: only available when compiled with
// VMA_RECORDING_ENABLED; otherwise requesting it is a hard error.
11022 #if VMA_RECORDING_ENABLED 11023 m_pRecorder = vma_new(
this, VmaRecorder)();
11025 if(res != VK_SUCCESS)
11029 m_pRecorder->WriteConfiguration(
11030 m_PhysicalDeviceProperties,
11032 m_UseKhrDedicatedAllocation);
11033 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
11035 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
11036 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records the destroy event and deletes the recorder if active,
// asserts all user pools were destroyed first, then frees the per-memory-type
// dedicated-allocation lists and block vectors in reverse order.
11043 VmaAllocator_T::~VmaAllocator_T()
11045 #if VMA_RECORDING_ENABLED 11046 if(m_pRecorder != VMA_NULL)
11048 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
11049 vma_delete(
this, m_pRecorder);
// User is responsible for destroying all VmaPool objects before the allocator.
11053 VMA_ASSERT(m_Pools.empty());
11055 for(
size_t i = GetMemoryTypeCount(); i--; )
11057 vma_delete(
this, m_pDedicatedAllocations[i]);
11058 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions. With static linking (VMA_STATIC_VULKAN_FUNCTIONS
// == 1) the global Vulkan prototypes are taken directly, and the KHR dedicated-
// allocation entry points are fetched via vkGetDeviceProcAddr when that
// extension is enabled. Any non-null pointers the user passed in
// pVulkanFunctions then override the defaults. Finally every required pointer
// is validated with VMA_ASSERT.
11062 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
11064 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11065 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
11066 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
11067 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
11068 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
11069 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
11070 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
11071 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
11072 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
11073 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
11074 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
11075 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
11076 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
11077 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
11078 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
11079 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
11080 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points have no static prototypes; resolve them dynamically.
11081 #if VMA_DEDICATED_ALLOCATION 11082 if(m_UseKhrDedicatedAllocation)
11084 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
11085 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11086 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11087 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
11089 #endif // #if VMA_DEDICATED_ALLOCATION 11090 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11092 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11093 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11095 if(pVulkanFunctions != VMA_NULL)
11097 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11098 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11099 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11100 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11101 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11102 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11103 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11104 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11105 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11106 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11107 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11108 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11109 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11110 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11111 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11112 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11113 #if VMA_DEDICATED_ALLOCATION 11114 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11115 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// All pointers must now be valid, whatever their origin.
11119 #undef VMA_COPY_IF_NOT_NULL 11123 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
11124 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
11125 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
11126 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
11127 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
11128 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
11129 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
11130 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
11131 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
11132 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
11133 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
11134 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
11135 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
11136 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
11137 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
11138 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
11139 #if VMA_DEDICATED_ALLOCATION 11140 if(m_UseKhrDedicatedAllocation)
11142 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
11143 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
11148 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
11150 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
11151 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
11152 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
11153 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from one specific memory type: first tries the dedicated
// path when preferred (debug macro, caller request, or size > half a block),
// then the default block vector, and finally falls back to a dedicated
// allocation if sub-allocation fails. NOTE(review): many intermediate lines
// (parameters, arguments, braces) are missing from this extraction.
11156 VkResult VmaAllocator_T::AllocateMemoryOfType(
11158 VkDeviceSize alignment,
11159 bool dedicatedAllocation,
11160 VkBuffer dedicatedBuffer,
11161 VkImage dedicatedImage,
11163 uint32_t memTypeIndex,
11164 VmaSuballocationType suballocType,
11167 VMA_ASSERT(pAllocation != VMA_NULL);
11168 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped allocations are impossible on non-HOST_VISIBLE memory types.
11174 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
11179 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
11180 VMA_ASSERT(blockVector);
// Heuristic: allocations larger than half the preferred block size go to
// dedicated memory to avoid fragmenting blocks.
11182 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
11183 bool preferDedicatedMemory =
11184 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
11185 dedicatedAllocation ||
11187 size > preferredBlockSize / 2;
11189 if(preferDedicatedMemory &&
11191 finalCreateInfo.
pool == VK_NULL_HANDLE)
11200 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11204 return AllocateDedicatedMemory(
// Normal path: sub-allocate out of the memory type's block vector.
11218 VkResult res = blockVector->Allocate(
11220 m_CurrentFrameIndex.load(),
11226 if(res == VK_SUCCESS)
11234 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try a dedicated allocation instead.
11238 res = AllocateDedicatedMemory(
11244 finalCreateInfo.pUserData,
11248 if(res == VK_SUCCESS)
11251 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
11257 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated (non-sub-allocated) VkDeviceMemory for one resource:
// builds VkMemoryAllocateInfo (chaining VkMemoryDedicatedAllocateInfoKHR when
// the extension is in use), allocates, optionally persistently maps, then
// constructs the VmaAllocation_T and registers it in the sorted per-type
// dedicated-allocation list. NOTE(review): some lines missing from this view.
11264 VkResult VmaAllocator_T::AllocateDedicatedMemory(
11266 VmaSuballocationType suballocType,
11267 uint32_t memTypeIndex,
11269 bool isUserDataString,
11271 VkBuffer dedicatedBuffer,
11272 VkImage dedicatedImage,
11275 VMA_ASSERT(pAllocation);
11277 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11278 allocInfo.memoryTypeIndex = memTypeIndex;
11279 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info for exactly one of buffer/image.
11281 #if VMA_DEDICATED_ALLOCATION 11282 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
11283 if(m_UseKhrDedicatedAllocation)
11285 if(dedicatedBuffer != VK_NULL_HANDLE)
11287 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
11288 dedicatedAllocInfo.buffer = dedicatedBuffer;
11289 allocInfo.pNext = &dedicatedAllocInfo;
11291 else if(dedicatedImage != VK_NULL_HANDLE)
11293 dedicatedAllocInfo.image = dedicatedImage;
11294 allocInfo.pNext = &dedicatedAllocInfo;
11297 #endif // #if VMA_DEDICATED_ALLOCATION 11300 VkDeviceMemory hMemory = VK_NULL_HANDLE;
11301 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
11304 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistently map if requested; free the memory again on map failure.
11308 void* pMappedData = VMA_NULL;
11311 res = (*m_VulkanFunctions.vkMapMemory)(
11320 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
11321 FreeVulkanMemory(memTypeIndex, size, hMemory);
11326 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
11327 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
11328 (*pAllocation)->SetUserData(
this, pUserData);
11329 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11331 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted dedicated-allocation list for this memory type.
11336 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
11337 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
11338 VMA_ASSERT(pDedicatedAllocations);
11339 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
11342 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is in use, uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also learn whether a dedicated
// allocation is required/preferred; otherwise falls back to the core query
// and reports both flags as false.
11347 void VmaAllocator_T::GetBufferMemoryRequirements(
11349 VkMemoryRequirements& memReq,
11350 bool& requiresDedicatedAllocation,
11351 bool& prefersDedicatedAllocation)
const 11353 #if VMA_DEDICATED_ALLOCATION 11354 if(m_UseKhrDedicatedAllocation)
11356 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
11357 memReqInfo.buffer = hBuffer;
11359 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
11361 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
11362 memReq2.pNext = &memDedicatedReq;
11364 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
11366 memReq = memReq2.memoryRequirements;
11367 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
11368 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-Vulkan fallback: no dedicated-allocation information available.
11371 #endif // #if VMA_DEDICATED_ALLOCATION 11373 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
11374 requiresDedicatedAllocation =
false;
11375 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR plus VkMemoryDedicatedRequirementsKHR when
// the dedicated-allocation extension is active, otherwise the core query with
// both dedicated-allocation flags reported false.
11379 void VmaAllocator_T::GetImageMemoryRequirements(
11381 VkMemoryRequirements& memReq,
11382 bool& requiresDedicatedAllocation,
11383 bool& prefersDedicatedAllocation)
const 11385 #if VMA_DEDICATED_ALLOCATION 11386 if(m_UseKhrDedicatedAllocation)
11388 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
11389 memReqInfo.image = hImage;
11391 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
11393 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
11394 memReq2.pNext = &memDedicatedReq;
11396 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
11398 memReq = memReq2.memoryRequirements;
11399 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
11400 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Core-Vulkan fallback: no dedicated-allocation information available.
11403 #endif // #if VMA_DEDICATED_ALLOCATION 11405 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
11406 requiresDedicatedAllocation =
false;
11407 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// memory types matching vkMemReq.memoryTypeBits — trying each candidate via
// AllocateMemoryOfType and masking out types that fail — until one succeeds or
// all are exhausted. NOTE(review): many intermediate lines are missing from
// this extraction.
11411 VkResult VmaAllocator_T::AllocateMemory(
11412 const VkMemoryRequirements& vkMemReq,
11413 bool requiresDedicatedAllocation,
11414 bool prefersDedicatedAllocation,
11415 VkBuffer dedicatedBuffer,
11416 VkImage dedicatedImage,
11418 VmaSuballocationType suballocType,
// Flag-combination validation.
11424 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
11425 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11428 (createInfo.
flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
11430 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
11431 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11433 if(requiresDedicatedAllocation)
11437 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
11438 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11440 if(createInfo.
pool != VK_NULL_HANDLE)
11442 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
11443 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11446 if((createInfo.
pool != VK_NULL_HANDLE) &&
11449 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
11450 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: allocate directly from the custom pool's block vector,
// respecting the memory type's minimum alignment.
11453 if(createInfo.
pool != VK_NULL_HANDLE)
11455 const VkDeviceSize alignmentForPool = VMA_MAX(
11456 vkMemReq.alignment,
11457 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
11458 return createInfo.
pool->m_BlockVector.Allocate(
11460 m_CurrentFrameIndex.load(),
// Default path: probe candidate memory types in order of suitability.
11470 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
11471 uint32_t memTypeIndex = UINT32_MAX;
11473 if(res == VK_SUCCESS)
11475 VkDeviceSize alignmentForMemType = VMA_MAX(
11476 vkMemReq.alignment,
11477 GetMemoryTypeMinAlignment(memTypeIndex));
11479 res = AllocateMemoryOfType(
11481 alignmentForMemType,
11482 requiresDedicatedAllocation || prefersDedicatedAllocation,
11490 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate mask and retry.
11500 memoryTypeBits &= ~(1u << memTypeIndex);
11503 if(res == VK_SUCCESS)
11505 alignmentForMemType = VMA_MAX(
11506 vkMemReq.alignment,
11507 GetMemoryTypeMinAlignment(memTypeIndex));
11509 res = AllocateMemoryOfType(
11511 alignmentForMemType,
11512 requiresDedicatedAllocation || prefersDedicatedAllocation,
11520 if(res == VK_SUCCESS)
11530 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: unless it is a lost allocation, optionally fills the
// memory with the "destroyed" debug pattern and returns it to its owning
// block vector (custom pool or default) or frees the dedicated memory. The
// VmaAllocation_T object itself is always cleared of user data and deleted.
11541 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
11543 VMA_ASSERT(allocation);
// Lost allocations have no backing memory to release.
11545 if(allocation->CanBecomeLost() ==
false ||
11546 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11548 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11550 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
11553 switch(allocation->GetType())
11555 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
11557 VmaBlockVector* pBlockVector = VMA_NULL;
11558 VmaPool hPool = allocation->GetPool();
11559 if(hPool != VK_NULL_HANDLE)
11561 pBlockVector = &hPool->m_BlockVector;
11565 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
11566 pBlockVector = m_pBlockVectors[memTypeIndex];
11568 pBlockVector->Free(allocation);
11571 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
11572 FreeDedicatedMemory(allocation);
// The handle object is destroyed regardless of allocation type.
11579 allocation->SetUserData(
this, VMA_NULL);
11580 vma_delete(
this, allocation);
// Aggregates statistics across the whole allocator into *pStats: initializes
// the total/per-type/per-heap entries, accumulates the default block vectors,
// custom pools, and dedicated allocations, then post-processes every entry
// (e.g. computing averages). NOTE(review): some lines missing from this view.
11583 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
11586 InitStatInfo(pStats->
total);
11587 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
11589 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
11593 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11595 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
11596 VMA_ASSERT(pBlockVector);
11597 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex.
11602 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11603 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
11605 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own mutex.
11610 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11612 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
11613 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
11614 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
11615 VMA_ASSERT(pDedicatedAllocVector);
11616 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
11619 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
11620 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11621 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11622 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived values (averages etc.) for every stat entry.
11627 VmaPostprocessCalcStatInfo(pStats->
total);
11628 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
11629 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
11630 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
11631 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID reported by AMD GPUs in
// VkPhysicalDeviceProperties::vendorID.
11634 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given allocations: registers eligible allocations (block
// type, HOST_VISIBLE|HOST_COHERENT memory, not lost, non-linear block vector)
// with per-block-vector defragmentators, runs Defragment on the default block
// vectors and custom pools within the optional byte/move budgets, then
// destroys all defragmentators. NOTE(review): some lines missing from view.
11636 VkResult VmaAllocator_T::Defragment(
11638 size_t allocationCount,
11639 VkBool32* pAllocationsChanged,
11643 if(pAllocationsChanged != VMA_NULL)
11645 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
11647 if(pDefragmentationStats != VMA_NULL)
11649 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
11652 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
11654 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
11656 const size_t poolCount = m_Pools.size();
// Phase 1: register each eligible allocation with its block vector's
// defragmentator.
11659 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11662 VMA_ASSERT(hAlloc);
11663 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
11665 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11666 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
11668 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
11670 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
11672 VmaBlockVector* pAllocBlockVector = VMA_NULL;
11674 const VmaPool hAllocPool = hAlloc->GetPool();
11676 if(hAllocPool != VK_NULL_HANDLE)
// Linear-algorithm pools cannot be defragmented.
11679 if(!hAllocPool->m_BlockVector.UsesLinearAlgorithm())
11681 pAllocBlockVector = &hAllocPool->m_BlockVector;
11687 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
11690 if(pAllocBlockVector != VMA_NULL)
11692 VmaDefragmentator*
const pDefragmentator =
11693 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
11694 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
11695 &pAllocationsChanged[allocIndex] : VMA_NULL;
11696 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation within the user's optional budgets.
11701 VkResult result = VK_SUCCESS;
11705 VkDeviceSize maxBytesToMove = SIZE_MAX;
11706 uint32_t maxAllocationsToMove = UINT32_MAX;
11707 if(pDefragmentationInfo != VMA_NULL)
11714 for(uint32_t memTypeIndex = 0;
11715 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
11719 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
11721 result = m_pBlockVectors[memTypeIndex]->Defragment(
11722 pDefragmentationStats,
11724 maxAllocationsToMove);
11729 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
11731 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
11732 pDefragmentationStats,
11734 maxAllocationsToMove);
// Phase 3: tear down all defragmentators, pools first.
11740 for(
size_t poolIndex = poolCount; poolIndex--; )
11742 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
11746 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
11748 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
11750 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (the signature line is missing
// from this extraction). For allocations that can become lost, it reads the
// last-use frame index in a compare-exchange loop: a lost allocation reports
// zeroed memory fields, otherwise the frame index is bumped to the current
// frame and full info is returned. For ordinary allocations the info is
// copied directly (after touching the frame index when stats are enabled).
11759 if(hAllocation->CanBecomeLost())
11765 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11766 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report size/userData only; memory fields are zeroed.
11769 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
11773 pAllocationInfo->
offset = 0;
11774 pAllocationInfo->
size = hAllocation->GetSize();
11776 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: return full info without another CAS.
11779 else if(localLastUseFrameIndex == localCurrFrameIndex)
11781 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
11782 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
11783 pAllocationInfo->
offset = hAllocation->GetOffset();
11784 pAllocationInfo->
size = hAllocation->GetSize();
11786 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index atomically; retry on
// concurrent modification.
11791 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11793 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocation: when stats are enabled, still record the touch so
// JSON dumps show up-to-date frame usage.
11800 #if VMA_STATS_STRING_ENABLED 11801 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11802 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11805 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
11806 if(localLastUseFrameIndex == localCurrFrameIndex)
11812 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11814 localLastUseFrameIndex = localCurrFrameIndex;
11820 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
11821 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
11822 pAllocationInfo->
offset = hAllocation->GetOffset();
11823 pAllocationInfo->
size = hAllocation->GetSize();
11824 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
11825 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame. Returns false if the
// allocation is lost (hedged: the return statements themselves are missing
// from this extraction; behavior inferred from the visible CAS loop — the
// lost branch cannot succeed, the already-current branch succeeds). Uses the
// same compare-exchange loop as GetAllocationInfo.
11829 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
11832 if(hAllocation->CanBecomeLost())
11834 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11835 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11838 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
11842 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Attempt to publish the new frame index; loop again if another thread won.
11848 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11850 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocation with stats enabled: still record the touch.
11857 #if VMA_STATS_STRING_ENABLED 11858 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
11859 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
11862 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
11863 if(localLastUseFrameIndex == localCurrFrameIndex)
11869 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
11871 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature missing from this
// extraction): validates the create info, constructs a VmaPool_T with the
// memory type's preferred block size, creates its minimum blocks (destroying
// the pool on failure), assigns a unique id, and registers the pool in the
// sorted m_Pools list under the pools mutex.
11883 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
11895 return VK_ERROR_INITIALIZATION_FAILED;
11898 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
11900 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-create the requested minimum number of blocks; roll back on failure.
11902 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
11903 if(res != VK_SUCCESS)
11905 vma_delete(
this, *pPool);
11912 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11913 (*pPool)->SetId(m_NextPoolId++);
11914 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters the pool from the sorted m_Pools list (under the pools mutex,
// asserting it was actually registered) and deletes the pool object.
11920 void VmaAllocator_T::DestroyPool(
VmaPool pool)
11924 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11925 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
11926 VMA_ASSERT(success &&
"Pool not found in Allocator.");
11929 vma_delete(
this, pool);
// Body fragment of VmaAllocator_T::GetPoolStats (signature missing from this
// extraction): delegates statistics gathering to the pool's block vector.
11934 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, which the
// lost-allocation machinery compares against allocations' last-use frames.
11937 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
11939 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector, marking allocations unused for enough
// frames as lost; the count of newly-lost allocations is returned through
// pLostAllocationCount.
11942 void VmaAllocator_T::MakePoolAllocationsLost(
11944 size_t* pLostAllocationCount)
11946 hPool->m_BlockVector.MakePoolAllocationsLost(
11947 m_CurrentFrameIndex.load(),
11948 pLostAllocationCount);
11951 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
11953 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks on all default block vectors and custom pools whose
// memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to VK_SUCCESS
// once any vector reports success; other results propagate through the
// (partially elided) switch statements.
11956 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
11958 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors.
11961 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11963 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
11965 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
11966 VMA_ASSERT(pBlockVector);
11967 VkResult localRes = pBlockVector->CheckCorruption();
11970 case VK_ERROR_FEATURE_NOT_PRESENT:
11973 finalRes = VK_SUCCESS;
// Custom pools, under the pools mutex.
11983 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
11984 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
11986 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
11988 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
11991 case VK_ERROR_FEATURE_NOT_PRESENT:
11994 finalRes = VK_SUCCESS;
12006 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
12008 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
12009 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with per-heap budget accounting: when the target
// heap has a user-imposed size limit, the allocation is admitted only if it
// fits the remaining budget (otherwise VK_ERROR_OUT_OF_DEVICE_MEMORY), and
// the budget is decremented on success under the heap-limit mutex. The
// user's pfnAllocate callback fires after a successful allocation.
12012 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
12014 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
12017 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12019 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12020 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
12022 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12023 if(res == VK_SUCCESS)
12025 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exceeded: refuse without calling the driver.
12030 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit configured for this heap: allocate directly.
12035 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12038 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
12040 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
12046 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
12048 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
12050 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
12053 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
12055 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
12056 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12058 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12059 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host address space. Lost-capable allocations
// cannot be mapped. Block allocations map the whole owning block (with a
// per-block map reference count) and offset the returned pointer; dedicated
// allocations delegate to the allocation object itself.
12063 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
12065 if(hAllocation->CanBecomeLost())
12067 return VK_ERROR_MEMORY_MAP_FAILED;
12070 switch(hAllocation->GetType())
12072 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12074 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12075 char *pBytes = VMA_NULL;
12076 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
12077 if(res == VK_SUCCESS)
// Block is mapped from its start; apply this allocation's offset.
12079 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
12080 hAllocation->BlockAllocMap();
12084 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12085 return hAllocation->DedicatedAllocMap(
this, ppData);
12088 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (the signature line is missing from this
// extraction). Mirrors Map: block allocations decrement their map count and
// unmap the owning block; dedicated allocations delegate to the allocation.
12094 switch(hAllocation->GetType())
12096 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12098 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12099 hAllocation->BlockAllocUnmap();
12100 pBlock->Unmap(
this, 1);
12103 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12104 hAllocation->DedicatedAllocUnmap(
this);
12111 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12113 VkResult res = VK_SUCCESS;
12114 switch(hAllocation->GetType())
12116 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12117 res = GetVulkanFunctions().vkBindBufferMemory(
12120 hAllocation->GetMemory(),
12123 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12125 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12126 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
12127 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
12136 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
12138 VkResult res = VK_SUCCESS;
12139 switch(hAllocation->GetType())
12141 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12142 res = GetVulkanFunctions().vkBindImageMemory(
12145 hAllocation->GetMemory(),
12148 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12150 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12151 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
12152 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
12161 void VmaAllocator_T::FlushOrInvalidateAllocation(
12163 VkDeviceSize offset, VkDeviceSize size,
12164 VMA_CACHE_OPERATION op)
12166 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
12167 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
12169 const VkDeviceSize allocationSize = hAllocation->GetSize();
12170 VMA_ASSERT(offset <= allocationSize);
12172 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12174 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12175 memRange.memory = hAllocation->GetMemory();
12177 switch(hAllocation->GetType())
12179 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12180 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12181 if(size == VK_WHOLE_SIZE)
12183 memRange.size = allocationSize - memRange.offset;
12187 VMA_ASSERT(offset + size <= allocationSize);
12188 memRange.size = VMA_MIN(
12189 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
12190 allocationSize - memRange.offset);
12194 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12197 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
12198 if(size == VK_WHOLE_SIZE)
12200 size = allocationSize - offset;
12204 VMA_ASSERT(offset + size <= allocationSize);
12206 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
12209 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
12210 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
12211 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
12212 memRange.offset += allocationOffset;
12213 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
12224 case VMA_CACHE_FLUSH:
12225 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
12227 case VMA_CACHE_INVALIDATE:
12228 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
12237 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
12239 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
12241 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12243 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12244 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12245 VMA_ASSERT(pDedicatedAllocations);
12246 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
12247 VMA_ASSERT(success);
12250 VkDeviceMemory hMemory = allocation->GetMemory();
12252 if(allocation->GetMappedData() != VMA_NULL)
12254 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
12257 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
12259 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
12262 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
12264 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
12265 !hAllocation->CanBecomeLost() &&
12266 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12268 void* pData = VMA_NULL;
12269 VkResult res = Map(hAllocation, &pData);
12270 if(res == VK_SUCCESS)
12272 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
12273 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
12274 Unmap(hAllocation);
12278 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
12283 #if VMA_STATS_STRING_ENABLED 12285 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
12287 bool dedicatedAllocationsStarted =
false;
12288 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12290 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12291 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12292 VMA_ASSERT(pDedicatedAllocVector);
12293 if(pDedicatedAllocVector->empty() ==
false)
12295 if(dedicatedAllocationsStarted ==
false)
12297 dedicatedAllocationsStarted =
true;
12298 json.WriteString(
"DedicatedAllocations");
12299 json.BeginObject();
12302 json.BeginString(
"Type ");
12303 json.ContinueString(memTypeIndex);
12308 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
12310 json.BeginObject(
true);
12312 hAlloc->PrintParameters(json);
12319 if(dedicatedAllocationsStarted)
12325 bool allocationsStarted =
false;
12326 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12328 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
12330 if(allocationsStarted ==
false)
12332 allocationsStarted =
true;
12333 json.WriteString(
"DefaultPools");
12334 json.BeginObject();
12337 json.BeginString(
"Type ");
12338 json.ContinueString(memTypeIndex);
12341 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
12344 if(allocationsStarted)
12352 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12353 const size_t poolCount = m_Pools.size();
12356 json.WriteString(
"Pools");
12357 json.BeginObject();
12358 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
12360 json.BeginString();
12361 json.ContinueString(m_Pools[poolIndex]->GetId());
12364 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
12371 #endif // #if VMA_STATS_STRING_ENABLED 12380 VMA_ASSERT(pCreateInfo && pAllocator);
12381 VMA_DEBUG_LOG(
"vmaCreateAllocator");
12383 return (*pAllocator)->Init(pCreateInfo);
12389 if(allocator != VK_NULL_HANDLE)
12391 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
12392 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
12393 vma_delete(&allocationCallbacks, allocator);
12399 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
12401 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
12402 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
12407 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
12409 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
12410 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
12415 uint32_t memoryTypeIndex,
12416 VkMemoryPropertyFlags* pFlags)
12418 VMA_ASSERT(allocator && pFlags);
12419 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
12420 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
12425 uint32_t frameIndex)
12427 VMA_ASSERT(allocator);
12428 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
12430 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12432 allocator->SetCurrentFrameIndex(frameIndex);
12439 VMA_ASSERT(allocator && pStats);
12440 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12441 allocator->CalculateStats(pStats);
12444 #if VMA_STATS_STRING_ENABLED 12448 char** ppStatsString,
12449 VkBool32 detailedMap)
12451 VMA_ASSERT(allocator && ppStatsString);
12452 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12454 VmaStringBuilder sb(allocator);
12456 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
12457 json.BeginObject();
12460 allocator->CalculateStats(&stats);
12462 json.WriteString(
"Total");
12463 VmaPrintStatInfo(json, stats.
total);
12465 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
12467 json.BeginString(
"Heap ");
12468 json.ContinueString(heapIndex);
12470 json.BeginObject();
12472 json.WriteString(
"Size");
12473 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
12475 json.WriteString(
"Flags");
12476 json.BeginArray(
true);
12477 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
12479 json.WriteString(
"DEVICE_LOCAL");
12485 json.WriteString(
"Stats");
12486 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
12489 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
12491 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
12493 json.BeginString(
"Type ");
12494 json.ContinueString(typeIndex);
12497 json.BeginObject();
12499 json.WriteString(
"Flags");
12500 json.BeginArray(
true);
12501 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
12502 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
12504 json.WriteString(
"DEVICE_LOCAL");
12506 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12508 json.WriteString(
"HOST_VISIBLE");
12510 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
12512 json.WriteString(
"HOST_COHERENT");
12514 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
12516 json.WriteString(
"HOST_CACHED");
12518 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
12520 json.WriteString(
"LAZILY_ALLOCATED");
12526 json.WriteString(
"Stats");
12527 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
12536 if(detailedMap == VK_TRUE)
12538 allocator->PrintDetailedMap(json);
12544 const size_t len = sb.GetLength();
12545 char*
const pChars = vma_new_array(allocator,
char, len + 1);
12548 memcpy(pChars, sb.GetData(), len);
12550 pChars[len] =
'\0';
12551 *ppStatsString = pChars;
12556 char* pStatsString)
12558 if(pStatsString != VMA_NULL)
12560 VMA_ASSERT(allocator);
12561 size_t len = strlen(pStatsString);
12562 vma_delete_array(allocator, pStatsString, len + 1);
12566 #endif // #if VMA_STATS_STRING_ENABLED 12573 uint32_t memoryTypeBits,
12575 uint32_t* pMemoryTypeIndex)
12577 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12578 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12579 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12586 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
12587 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
12592 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12596 switch(pAllocationCreateInfo->
usage)
12601 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12603 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
12607 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12610 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12611 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12613 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
12617 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
12618 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
12624 *pMemoryTypeIndex = UINT32_MAX;
12625 uint32_t minCost = UINT32_MAX;
12626 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
12627 memTypeIndex < allocator->GetMemoryTypeCount();
12628 ++memTypeIndex, memTypeBit <<= 1)
12631 if((memTypeBit & memoryTypeBits) != 0)
12633 const VkMemoryPropertyFlags currFlags =
12634 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
12636 if((requiredFlags & ~currFlags) == 0)
12639 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
12641 if(currCost < minCost)
12643 *pMemoryTypeIndex = memTypeIndex;
12648 minCost = currCost;
12653 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
12658 const VkBufferCreateInfo* pBufferCreateInfo,
12660 uint32_t* pMemoryTypeIndex)
12662 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12663 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
12664 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12665 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12667 const VkDevice hDev = allocator->m_hDevice;
12668 VkBuffer hBuffer = VK_NULL_HANDLE;
12669 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
12670 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
12671 if(res == VK_SUCCESS)
12673 VkMemoryRequirements memReq = {};
12674 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
12675 hDev, hBuffer, &memReq);
12679 memReq.memoryTypeBits,
12680 pAllocationCreateInfo,
12683 allocator->GetVulkanFunctions().vkDestroyBuffer(
12684 hDev, hBuffer, allocator->GetAllocationCallbacks());
12691 const VkImageCreateInfo* pImageCreateInfo,
12693 uint32_t* pMemoryTypeIndex)
12695 VMA_ASSERT(allocator != VK_NULL_HANDLE);
12696 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
12697 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
12698 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
12700 const VkDevice hDev = allocator->m_hDevice;
12701 VkImage hImage = VK_NULL_HANDLE;
12702 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
12703 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
12704 if(res == VK_SUCCESS)
12706 VkMemoryRequirements memReq = {};
12707 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
12708 hDev, hImage, &memReq);
12712 memReq.memoryTypeBits,
12713 pAllocationCreateInfo,
12716 allocator->GetVulkanFunctions().vkDestroyImage(
12717 hDev, hImage, allocator->GetAllocationCallbacks());
12727 VMA_ASSERT(allocator && pCreateInfo && pPool);
12729 VMA_DEBUG_LOG(
"vmaCreatePool");
12731 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12733 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
12735 #if VMA_RECORDING_ENABLED 12736 if(allocator->GetRecorder() != VMA_NULL)
12738 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
12749 VMA_ASSERT(allocator);
12751 if(pool == VK_NULL_HANDLE)
12756 VMA_DEBUG_LOG(
"vmaDestroyPool");
12758 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12760 #if VMA_RECORDING_ENABLED 12761 if(allocator->GetRecorder() != VMA_NULL)
12763 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
12767 allocator->DestroyPool(pool);
12775 VMA_ASSERT(allocator && pool && pPoolStats);
12777 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12779 allocator->GetPoolStats(pool, pPoolStats);
12785 size_t* pLostAllocationCount)
12787 VMA_ASSERT(allocator && pool);
12789 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12791 #if VMA_RECORDING_ENABLED 12792 if(allocator->GetRecorder() != VMA_NULL)
12794 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
12798 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
12803 VMA_ASSERT(allocator && pool);
12805 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12807 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
12809 return allocator->CheckPoolCorruption(pool);
12814 const VkMemoryRequirements* pVkMemoryRequirements,
12819 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
12821 VMA_DEBUG_LOG(
"vmaAllocateMemory");
12823 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12825 VkResult result = allocator->AllocateMemory(
12826 *pVkMemoryRequirements,
12832 VMA_SUBALLOCATION_TYPE_UNKNOWN,
12835 #if VMA_RECORDING_ENABLED 12836 if(allocator->GetRecorder() != VMA_NULL)
12838 allocator->GetRecorder()->RecordAllocateMemory(
12839 allocator->GetCurrentFrameIndex(),
12840 *pVkMemoryRequirements,
12846 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
12848 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12861 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
12863 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
12865 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12867 VkMemoryRequirements vkMemReq = {};
12868 bool requiresDedicatedAllocation =
false;
12869 bool prefersDedicatedAllocation =
false;
12870 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
12871 requiresDedicatedAllocation,
12872 prefersDedicatedAllocation);
12874 VkResult result = allocator->AllocateMemory(
12876 requiresDedicatedAllocation,
12877 prefersDedicatedAllocation,
12881 VMA_SUBALLOCATION_TYPE_BUFFER,
12884 #if VMA_RECORDING_ENABLED 12885 if(allocator->GetRecorder() != VMA_NULL)
12887 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
12888 allocator->GetCurrentFrameIndex(),
12890 requiresDedicatedAllocation,
12891 prefersDedicatedAllocation,
12897 if(pAllocationInfo && result == VK_SUCCESS)
12899 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12912 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
12914 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
12916 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12918 VkMemoryRequirements vkMemReq = {};
12919 bool requiresDedicatedAllocation =
false;
12920 bool prefersDedicatedAllocation =
false;
12921 allocator->GetImageMemoryRequirements(image, vkMemReq,
12922 requiresDedicatedAllocation, prefersDedicatedAllocation);
12924 VkResult result = allocator->AllocateMemory(
12926 requiresDedicatedAllocation,
12927 prefersDedicatedAllocation,
12931 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
12934 #if VMA_RECORDING_ENABLED 12935 if(allocator->GetRecorder() != VMA_NULL)
12937 allocator->GetRecorder()->RecordAllocateMemoryForImage(
12938 allocator->GetCurrentFrameIndex(),
12940 requiresDedicatedAllocation,
12941 prefersDedicatedAllocation,
12947 if(pAllocationInfo && result == VK_SUCCESS)
12949 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
12959 VMA_ASSERT(allocator);
12961 if(allocation == VK_NULL_HANDLE)
12966 VMA_DEBUG_LOG(
"vmaFreeMemory");
12968 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12970 #if VMA_RECORDING_ENABLED 12971 if(allocator->GetRecorder() != VMA_NULL)
12973 allocator->GetRecorder()->RecordFreeMemory(
12974 allocator->GetCurrentFrameIndex(),
12979 allocator->FreeMemory(allocation);
12987 VMA_ASSERT(allocator && allocation && pAllocationInfo);
12989 VMA_DEBUG_GLOBAL_MUTEX_LOCK
12991 #if VMA_RECORDING_ENABLED 12992 if(allocator->GetRecorder() != VMA_NULL)
12994 allocator->GetRecorder()->RecordGetAllocationInfo(
12995 allocator->GetCurrentFrameIndex(),
13000 allocator->GetAllocationInfo(allocation, pAllocationInfo);
13007 VMA_ASSERT(allocator && allocation);
13009 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13011 #if VMA_RECORDING_ENABLED 13012 if(allocator->GetRecorder() != VMA_NULL)
13014 allocator->GetRecorder()->RecordTouchAllocation(
13015 allocator->GetCurrentFrameIndex(),
13020 return allocator->TouchAllocation(allocation);
13028 VMA_ASSERT(allocator && allocation);
13030 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13032 allocation->SetUserData(allocator, pUserData);
13034 #if VMA_RECORDING_ENABLED 13035 if(allocator->GetRecorder() != VMA_NULL)
13037 allocator->GetRecorder()->RecordSetAllocationUserData(
13038 allocator->GetCurrentFrameIndex(),
13049 VMA_ASSERT(allocator && pAllocation);
13051 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
13053 allocator->CreateLostAllocation(pAllocation);
13055 #if VMA_RECORDING_ENABLED 13056 if(allocator->GetRecorder() != VMA_NULL)
13058 allocator->GetRecorder()->RecordCreateLostAllocation(
13059 allocator->GetCurrentFrameIndex(),
13070 VMA_ASSERT(allocator && allocation && ppData);
13072 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13074 VkResult res = allocator->Map(allocation, ppData);
13076 #if VMA_RECORDING_ENABLED 13077 if(allocator->GetRecorder() != VMA_NULL)
13079 allocator->GetRecorder()->RecordMapMemory(
13080 allocator->GetCurrentFrameIndex(),
13092 VMA_ASSERT(allocator && allocation);
13094 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13096 #if VMA_RECORDING_ENABLED 13097 if(allocator->GetRecorder() != VMA_NULL)
13099 allocator->GetRecorder()->RecordUnmapMemory(
13100 allocator->GetCurrentFrameIndex(),
13105 allocator->Unmap(allocation);
13110 VMA_ASSERT(allocator && allocation);
13112 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13114 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13116 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
13118 #if VMA_RECORDING_ENABLED 13119 if(allocator->GetRecorder() != VMA_NULL)
13121 allocator->GetRecorder()->RecordFlushAllocation(
13122 allocator->GetCurrentFrameIndex(),
13123 allocation, offset, size);
13130 VMA_ASSERT(allocator && allocation);
13132 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
13134 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13136 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
13138 #if VMA_RECORDING_ENABLED 13139 if(allocator->GetRecorder() != VMA_NULL)
13141 allocator->GetRecorder()->RecordInvalidateAllocation(
13142 allocator->GetCurrentFrameIndex(),
13143 allocation, offset, size);
13150 VMA_ASSERT(allocator);
13152 VMA_DEBUG_LOG(
"vmaCheckCorruption");
13154 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13156 return allocator->CheckCorruption(memoryTypeBits);
13162 size_t allocationCount,
13163 VkBool32* pAllocationsChanged,
13167 VMA_ASSERT(allocator && pAllocations);
13169 VMA_DEBUG_LOG(
"vmaDefragment");
13171 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13173 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
13181 VMA_ASSERT(allocator && allocation && buffer);
13183 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
13185 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13187 return allocator->BindBufferMemory(allocation, buffer);
13195 VMA_ASSERT(allocator && allocation && image);
13197 VMA_DEBUG_LOG(
"vmaBindImageMemory");
13199 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13201 return allocator->BindImageMemory(allocation, image);
13206 const VkBufferCreateInfo* pBufferCreateInfo,
13212 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
13214 VMA_DEBUG_LOG(
"vmaCreateBuffer");
13216 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13218 *pBuffer = VK_NULL_HANDLE;
13219 *pAllocation = VK_NULL_HANDLE;
13222 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
13223 allocator->m_hDevice,
13225 allocator->GetAllocationCallbacks(),
13230 VkMemoryRequirements vkMemReq = {};
13231 bool requiresDedicatedAllocation =
false;
13232 bool prefersDedicatedAllocation =
false;
13233 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
13234 requiresDedicatedAllocation, prefersDedicatedAllocation);
13238 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
13240 VMA_ASSERT(vkMemReq.alignment %
13241 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
13243 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
13245 VMA_ASSERT(vkMemReq.alignment %
13246 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
13248 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
13250 VMA_ASSERT(vkMemReq.alignment %
13251 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
13255 res = allocator->AllocateMemory(
13257 requiresDedicatedAllocation,
13258 prefersDedicatedAllocation,
13261 *pAllocationCreateInfo,
13262 VMA_SUBALLOCATION_TYPE_BUFFER,
13265 #if VMA_RECORDING_ENABLED 13266 if(allocator->GetRecorder() != VMA_NULL)
13268 allocator->GetRecorder()->RecordCreateBuffer(
13269 allocator->GetCurrentFrameIndex(),
13270 *pBufferCreateInfo,
13271 *pAllocationCreateInfo,
13279 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
13283 #if VMA_STATS_STRING_ENABLED 13284 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
13286 if(pAllocationInfo != VMA_NULL)
13288 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13293 allocator->FreeMemory(*pAllocation);
13294 *pAllocation = VK_NULL_HANDLE;
13295 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
13296 *pBuffer = VK_NULL_HANDLE;
13299 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
13300 *pBuffer = VK_NULL_HANDLE;
13311 VMA_ASSERT(allocator);
13313 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
13318 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
13320 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13322 #if VMA_RECORDING_ENABLED 13323 if(allocator->GetRecorder() != VMA_NULL)
13325 allocator->GetRecorder()->RecordDestroyBuffer(
13326 allocator->GetCurrentFrameIndex(),
13331 if(buffer != VK_NULL_HANDLE)
13333 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
13336 if(allocation != VK_NULL_HANDLE)
13338 allocator->FreeMemory(allocation);
13344 const VkImageCreateInfo* pImageCreateInfo,
13350 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
13352 VMA_DEBUG_LOG(
"vmaCreateImage");
13354 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13356 *pImage = VK_NULL_HANDLE;
13357 *pAllocation = VK_NULL_HANDLE;
13360 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
13361 allocator->m_hDevice,
13363 allocator->GetAllocationCallbacks(),
13367 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
13368 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
13369 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
13372 VkMemoryRequirements vkMemReq = {};
13373 bool requiresDedicatedAllocation =
false;
13374 bool prefersDedicatedAllocation =
false;
13375 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
13376 requiresDedicatedAllocation, prefersDedicatedAllocation);
13378 res = allocator->AllocateMemory(
13380 requiresDedicatedAllocation,
13381 prefersDedicatedAllocation,
13384 *pAllocationCreateInfo,
13388 #if VMA_RECORDING_ENABLED 13389 if(allocator->GetRecorder() != VMA_NULL)
13391 allocator->GetRecorder()->RecordCreateImage(
13392 allocator->GetCurrentFrameIndex(),
13394 *pAllocationCreateInfo,
13402 res = allocator->BindImageMemory(*pAllocation, *pImage);
13406 #if VMA_STATS_STRING_ENABLED 13407 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
13409 if(pAllocationInfo != VMA_NULL)
13411 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13416 allocator->FreeMemory(*pAllocation);
13417 *pAllocation = VK_NULL_HANDLE;
13418 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
13419 *pImage = VK_NULL_HANDLE;
13422 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
13423 *pImage = VK_NULL_HANDLE;
13434 VMA_ASSERT(allocator);
13436 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
13441 VMA_DEBUG_LOG(
"vmaDestroyImage");
13443 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13445 #if VMA_RECORDING_ENABLED 13446 if(allocator->GetRecorder() != VMA_NULL)
13448 allocator->GetRecorder()->RecordDestroyImage(
13449 allocator->GetCurrentFrameIndex(),
13454 if(image != VK_NULL_HANDLE)
13456 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
13458 if(allocation != VK_NULL_HANDLE)
13460 allocator->FreeMemory(allocation);
13464 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1446
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1759
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1515
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1477
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2032
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1458
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1716
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1450
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2132
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1512
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2377
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1940
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1489
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2013
Definition: vk_mem_alloc.h:1796
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1439
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1839
Definition: vk_mem_alloc.h:1743
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1524
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1577
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1509
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1747
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1649
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1455
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1648
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2381
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1541
VmaStatInfo total
Definition: vk_mem_alloc.h:1658
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2389
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1823
VkDeviceSize maxBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2372
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1456
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1381
Represents the main object of this library, once initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1518
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1963
Definition: vk_mem_alloc.h:1957
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1584
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2142
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1451
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1475
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1860
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1983
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2019
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1437
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1966
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1694
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2367
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2385
Definition: vk_mem_alloc.h:1733
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1847
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1454
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1654
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1387
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1408
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1479
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1413
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2387
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1834
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2029
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1447
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1637
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:1978
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1400
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1803
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1650
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1404
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1969
Definition: vk_mem_alloc.h:1742
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1453
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1829
Definition: vk_mem_alloc.h:1820
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1640
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1449
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1991
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1527
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2022
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1818
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1853
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1565
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1656
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1783
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1649
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1460
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1497
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1402
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1459
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2005
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1452
Definition: vk_mem_alloc.h:1814
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1505
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2156
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1521
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1649
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1646
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2010
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2137
Definition: vk_mem_alloc.h:1816
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2383
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1445
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1644
Definition: vk_mem_alloc.h:1699
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1959
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1494
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1642
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1457
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1461
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1770
Definition: vk_mem_alloc.h:1726
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2151
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1435
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1448
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:1955
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2118
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1922
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1650
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1809
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1469
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1657
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2016
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1650
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2123