23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1481 #ifndef VMA_RECORDING_ENABLED 1483 #define VMA_RECORDING_ENABLED 1 1485 #define VMA_RECORDING_ENABLED 0 1490 #define NOMINMAX // For windows.h 1493 #include <vulkan/vulkan.h> 1495 #if VMA_RECORDING_ENABLED 1496 #include <windows.h> 1499 #if !defined(VMA_DEDICATED_ALLOCATION) 1500 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1501 #define VMA_DEDICATED_ALLOCATION 1 1503 #define VMA_DEDICATED_ALLOCATION 0 1521 uint32_t memoryType,
1522 VkDeviceMemory memory,
1527 uint32_t memoryType,
1528 VkDeviceMemory memory,
1600 #if VMA_DEDICATED_ALLOCATION 1601 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1602 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1728 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1736 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1746 uint32_t memoryTypeIndex,
1747 VkMemoryPropertyFlags* pFlags);
1759 uint32_t frameIndex);
1792 #define VMA_STATS_STRING_ENABLED 1 1794 #if VMA_STATS_STRING_ENABLED 1801 char** ppStatsString,
1802 VkBool32 detailedMap);
1806 char* pStatsString);
1808 #endif // #if VMA_STATS_STRING_ENABLED 2037 uint32_t memoryTypeBits,
2039 uint32_t* pMemoryTypeIndex);
2055 const VkBufferCreateInfo* pBufferCreateInfo,
2057 uint32_t* pMemoryTypeIndex);
2073 const VkImageCreateInfo* pImageCreateInfo,
2075 uint32_t* pMemoryTypeIndex);
2247 size_t* pLostAllocationCount);
2346 const VkMemoryRequirements* pVkMemoryRequirements,
2608 size_t allocationCount,
2609 VkBool32* pAllocationsChanged,
2675 const VkBufferCreateInfo* pBufferCreateInfo,
2700 const VkImageCreateInfo* pImageCreateInfo,
2726 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2729 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2730 #define VMA_IMPLEMENTATION 2733 #ifdef VMA_IMPLEMENTATION 2734 #undef VMA_IMPLEMENTATION 2756 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2757 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2769 #if VMA_USE_STL_CONTAINERS 2770 #define VMA_USE_STL_VECTOR 1 2771 #define VMA_USE_STL_UNORDERED_MAP 1 2772 #define VMA_USE_STL_LIST 1 2775 #if VMA_USE_STL_VECTOR 2779 #if VMA_USE_STL_UNORDERED_MAP 2780 #include <unordered_map> 2783 #if VMA_USE_STL_LIST 2792 #include <algorithm> 2798 #define VMA_NULL nullptr 2801 #if defined(__APPLE__) || defined(__ANDROID__) 2803 void *aligned_alloc(
size_t alignment,
size_t size)
2806 if(alignment <
sizeof(
void*))
2808 alignment =
sizeof(
void*);
2812 if(posix_memalign(&pointer, alignment, size) == 0)
2826 #define VMA_ASSERT(expr) assert(expr) 2828 #define VMA_ASSERT(expr) 2834 #ifndef VMA_HEAVY_ASSERT 2836 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2838 #define VMA_HEAVY_ASSERT(expr) 2842 #ifndef VMA_ALIGN_OF 2843 #define VMA_ALIGN_OF(type) (__alignof(type)) 2846 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2848 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2850 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2854 #ifndef VMA_SYSTEM_FREE 2856 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2858 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2863 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2867 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2871 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2875 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2878 #ifndef VMA_DEBUG_LOG 2879 #define VMA_DEBUG_LOG(format, ...) 2889 #if VMA_STATS_STRING_ENABLED 2890 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2892 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
2894 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
2896 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
2898 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
2900 snprintf(outStr, strLen,
"%p", ptr);
2910 void Lock() { m_Mutex.lock(); }
2911 void Unlock() { m_Mutex.unlock(); }
2915 #define VMA_MUTEX VmaMutex 2926 #ifndef VMA_ATOMIC_UINT32 2927 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2930 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2935 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2938 #ifndef VMA_DEBUG_ALIGNMENT 2943 #define VMA_DEBUG_ALIGNMENT (1) 2946 #ifndef VMA_DEBUG_MARGIN 2951 #define VMA_DEBUG_MARGIN (0) 2954 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2959 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2962 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2968 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2971 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2976 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2979 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2984 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2987 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2988 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2992 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2993 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2997 #ifndef VMA_CLASS_NO_COPY 2998 #define VMA_CLASS_NO_COPY(className) \ 3000 className(const className&) = delete; \ 3001 className& operator=(const className&) = delete; 3004 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3007 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3009 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3010 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3016 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3017 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in the 32-bit value v (population count).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Kernighan's loop: each iteration clears the lowest set bit, so the
    // loop runs once per set bit. Behaviorally identical to the SWAR form.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align.
// For val that is already a multiple of align, returns val unchanged.
// NOTE(review): assumes align > 0; division by zero otherwise.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    // Bump to the last byte of the containing slot, then strip the remainder.
    // Same wrap-around characteristics as (val + align - 1) / align * align.
    const T bumped = val + (align - 1);
    return bumped - bumped % align;
}
// Rounds val down to the nearest multiple of align.
// NOTE(review): assumes align > 0; division by zero otherwise.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    // Strip the remainder; equivalent to val / align * align.
    return val - (val % align);
}
// Division of x by y with mathematical rounding to nearest (ties round up
// for integer T, because half of y is added before truncating division).
// NOTE(review): assumes y > 0; division by zero otherwise.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / static_cast<T>(2);
    return (x + half) / y;
}
// Returns true if x has at most one bit set, i.e. x is a power of two.
// NOTE(review): x == 0 also satisfies (x & (x-1)) == 0 and therefore returns
// true here, although 0 is not a power of two. Callers appear to rely on this
// convention - confirm before tightening.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
3064 static inline uint32_t VmaNextPow2(uint32_t v)
3075 static inline uint64_t VmaNextPow2(uint64_t v)
3089 static inline uint32_t VmaPrevPow2(uint32_t v)
3099 static inline uint64_t VmaPrevPow2(uint64_t v)
3111 static inline bool VmaStrIsEmpty(
const char* pStr)
3113 return pStr == VMA_NULL || *pStr ==
'\0';
3116 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Partition step for VmaQuickSort below. Uses the last element of [beg, end)
// as the pivot, moves every element for which cmp(elem, pivot) holds in front
// of it, and returns the iterator to the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator writePos = beg;
    for(Iterator readPos = beg; readPos < pivot; ++readPos)
    {
        if(cmp(*readPos, *pivot))
        {
            if(writePos != readPos)
            {
                VMA_SWAP(*readPos, *writePos);
            }
            ++writePos;
        }
    }
    // Place the pivot between the "less" prefix and the rest.
    if(writePos != pivot)
    {
        VMA_SWAP(*writePos, *pivot);
    }
    return writePos;
}

// Recursive quicksort of [beg, end) using cmp as the strict-weak ordering.
// Used as the default implementation of the VMA_SORT macro.
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        const Iterator pivotPos = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, pivotPos, cmp);
        VmaQuickSort<Iterator, Compare>(pivotPos + 1, end, cmp);
    }
}
3168 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3170 #endif // #ifndef VMA_SORT 3179 static inline bool VmaBlocksOnSamePage(
3180 VkDeviceSize resourceAOffset,
3181 VkDeviceSize resourceASize,
3182 VkDeviceSize resourceBOffset,
3183 VkDeviceSize pageSize)
3185 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3186 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3187 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3188 VkDeviceSize resourceBStart = resourceBOffset;
3189 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3190 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block.
// Consumed by VmaIsBufferImageGranularityConflict() to decide whether two
// neighboring suballocations may conflict w.r.t. bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Range is unused.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Resource kind unknown - treated as conflicting with everything.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,        // Holds a buffer.
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Holds an image of unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,  // Holds an image with linear tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5, // Holds an image with optimal tiling.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // Forces 32-bit underlying representation.
};
3210 static inline bool VmaIsBufferImageGranularityConflict(
3211 VmaSuballocationType suballocType1,
3212 VmaSuballocationType suballocType2)
3214 if(suballocType1 > suballocType2)
3216 VMA_SWAP(suballocType1, suballocType2);
3219 switch(suballocType1)
3221 case VMA_SUBALLOCATION_TYPE_FREE:
3223 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3225 case VMA_SUBALLOCATION_TYPE_BUFFER:
3227 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3228 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3229 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3231 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3232 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3233 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3234 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3236 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3237 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3245 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3247 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3248 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3249 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3251 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3255 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3257 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3258 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3259 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3261 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3272 VMA_CLASS_NO_COPY(VmaMutexLock)
3274 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3275 m_pMutex(useMutex ? &mutex : VMA_NULL)
3292 VMA_MUTEX* m_pMutex;
3295 #if VMA_DEBUG_GLOBAL_MUTEX 3296 static VMA_MUTEX gDebugGlobalMutex;
3297 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3299 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3303 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element for which cmp(element, key) is false (i.e. the first element
// not less than key), or end if all elements compare less.
// Equivalent to std::lower_bound over a range sorted by cmp.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t lo = 0;
    size_t hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;   // midpoint still less than key: search right half
        }
        else
        {
            hi = mid;       // midpoint not less: it may be the answer
        }
    }
    return beg + lo;
}
3336 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3338 if((pAllocationCallbacks != VMA_NULL) &&
3339 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3341 return (*pAllocationCallbacks->pfnAllocation)(
3342 pAllocationCallbacks->pUserData,
3345 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3349 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3353 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3355 if((pAllocationCallbacks != VMA_NULL) &&
3356 (pAllocationCallbacks->pfnFree != VMA_NULL))
3358 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3362 VMA_SYSTEM_FREE(ptr);
// Allocates raw, properly aligned storage for one object of type T through
// VmaMalloc (user callbacks or system allocator). Does NOT run T's
// constructor - see the vma_new macro, which combines this with placement new.
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}
// Allocates raw, properly aligned storage for count objects of type T through
// VmaMalloc. Does NOT run constructors - see the vma_new_array macro.
// NOTE(review): sizeof(T) * count is not checked for overflow; presumably
// callers keep counts small - confirm for untrusted sizes.
template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaAllocateArray == VMA_NULL ? VMA_NULL : (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}
3378 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3380 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3382 template<
typename T>
3383 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3386 VmaFree(pAllocationCallbacks, ptr);
3389 template<
typename T>
3390 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3394 for(
size_t i = count; i--; )
3398 VmaFree(pAllocationCallbacks, ptr);
3403 template<
typename T>
3404 class VmaStlAllocator
3407 const VkAllocationCallbacks*
const m_pCallbacks;
3408 typedef T value_type;
3410 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3411 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3413 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3414 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3416 template<
typename U>
3417 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3419 return m_pCallbacks == rhs.m_pCallbacks;
3421 template<
typename U>
3422 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3424 return m_pCallbacks != rhs.m_pCallbacks;
3427 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3430 #if VMA_USE_STL_VECTOR 3432 #define VmaVector std::vector 3434 template<
typename T,
typename allocatorT>
3435 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3437 vec.insert(vec.begin() + index, item);
3440 template<
typename T,
typename allocatorT>
3441 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3443 vec.erase(vec.begin() + index);
3446 #else // #if VMA_USE_STL_VECTOR 3451 template<
typename T,
typename AllocatorT>
3455 typedef T value_type;
3457 VmaVector(
const AllocatorT& allocator) :
3458 m_Allocator(allocator),
3465 VmaVector(
size_t count,
const AllocatorT& allocator) :
3466 m_Allocator(allocator),
3467 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3473 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3474 m_Allocator(src.m_Allocator),
3475 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3476 m_Count(src.m_Count),
3477 m_Capacity(src.m_Count)
3481 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3487 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3490 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3494 resize(rhs.m_Count);
3497 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3503 bool empty()
const {
return m_Count == 0; }
3504 size_t size()
const {
return m_Count; }
3505 T* data() {
return m_pArray; }
3506 const T* data()
const {
return m_pArray; }
3508 T& operator[](
size_t index)
3510 VMA_HEAVY_ASSERT(index < m_Count);
3511 return m_pArray[index];
3513 const T& operator[](
size_t index)
const 3515 VMA_HEAVY_ASSERT(index < m_Count);
3516 return m_pArray[index];
3521 VMA_HEAVY_ASSERT(m_Count > 0);
3524 const T& front()
const 3526 VMA_HEAVY_ASSERT(m_Count > 0);
3531 VMA_HEAVY_ASSERT(m_Count > 0);
3532 return m_pArray[m_Count - 1];
3534 const T& back()
const 3536 VMA_HEAVY_ASSERT(m_Count > 0);
3537 return m_pArray[m_Count - 1];
3540 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3542 newCapacity = VMA_MAX(newCapacity, m_Count);
3544 if((newCapacity < m_Capacity) && !freeMemory)
3546 newCapacity = m_Capacity;
3549 if(newCapacity != m_Capacity)
3551 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3554 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3556 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3557 m_Capacity = newCapacity;
3558 m_pArray = newArray;
3562 void resize(
size_t newCount,
bool freeMemory =
false)
3564 size_t newCapacity = m_Capacity;
3565 if(newCount > m_Capacity)
3567 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3571 newCapacity = newCount;
3574 if(newCapacity != m_Capacity)
3576 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3577 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3578 if(elementsToCopy != 0)
3580 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3582 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3583 m_Capacity = newCapacity;
3584 m_pArray = newArray;
3590 void clear(
bool freeMemory =
false)
3592 resize(0, freeMemory);
3595 void insert(
size_t index,
const T& src)
3597 VMA_HEAVY_ASSERT(index <= m_Count);
3598 const size_t oldCount = size();
3599 resize(oldCount + 1);
3600 if(index < oldCount)
3602 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3604 m_pArray[index] = src;
3607 void remove(
size_t index)
3609 VMA_HEAVY_ASSERT(index < m_Count);
3610 const size_t oldCount = size();
3611 if(index < oldCount - 1)
3613 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3615 resize(oldCount - 1);
3618 void push_back(
const T& src)
3620 const size_t newIndex = size();
3621 resize(newIndex + 1);
3622 m_pArray[newIndex] = src;
3627 VMA_HEAVY_ASSERT(m_Count > 0);
3631 void push_front(
const T& src)
3638 VMA_HEAVY_ASSERT(m_Count > 0);
3642 typedef T* iterator;
3644 iterator begin() {
return m_pArray; }
3645 iterator end() {
return m_pArray + m_Count; }
3648 AllocatorT m_Allocator;
3654 template<
typename T,
typename allocatorT>
3655 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3657 vec.insert(index, item);
3660 template<
typename T,
typename allocatorT>
3661 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3666 #endif // #if VMA_USE_STL_VECTOR 3668 template<
typename CmpLess,
typename VectorT>
3669 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3671 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3673 vector.data() + vector.size(),
3675 CmpLess()) - vector.data();
3676 VmaVectorInsert(vector, indexToInsert, value);
3677 return indexToInsert;
3680 template<
typename CmpLess,
typename VectorT>
3681 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3684 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3689 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3691 size_t indexToRemove = it - vector.begin();
3692 VmaVectorRemove(vector, indexToRemove);
3698 template<
typename CmpLess,
typename IterT,
typename KeyT>
3699 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3702 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3703 beg, end, value, comparator);
3705 (!comparator(*it, value) && !comparator(value, *it)))
3720 template<
typename T>
3721 class VmaPoolAllocator
3723 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3725 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3726 ~VmaPoolAllocator();
3734 uint32_t NextFreeIndex;
3741 uint32_t FirstFreeIndex;
3744 const VkAllocationCallbacks* m_pAllocationCallbacks;
3745 size_t m_ItemsPerBlock;
3746 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3748 ItemBlock& CreateNewBlock();
3751 template<
typename T>
3752 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3753 m_pAllocationCallbacks(pAllocationCallbacks),
3754 m_ItemsPerBlock(itemsPerBlock),
3755 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3757 VMA_ASSERT(itemsPerBlock > 0);
3760 template<
typename T>
3761 VmaPoolAllocator<T>::~VmaPoolAllocator()
3766 template<
typename T>
3767 void VmaPoolAllocator<T>::Clear()
3769 for(
size_t i = m_ItemBlocks.size(); i--; )
3770 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3771 m_ItemBlocks.clear();
3774 template<
typename T>
3775 T* VmaPoolAllocator<T>::Alloc()
3777 for(
size_t i = m_ItemBlocks.size(); i--; )
3779 ItemBlock& block = m_ItemBlocks[i];
3781 if(block.FirstFreeIndex != UINT32_MAX)
3783 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3784 block.FirstFreeIndex = pItem->NextFreeIndex;
3785 return &pItem->Value;
3790 ItemBlock& newBlock = CreateNewBlock();
3791 Item*
const pItem = &newBlock.pItems[0];
3792 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3793 return &pItem->Value;
3796 template<
typename T>
3797 void VmaPoolAllocator<T>::Free(T* ptr)
3800 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3802 ItemBlock& block = m_ItemBlocks[i];
3806 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3809 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3811 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3812 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3813 block.FirstFreeIndex = index;
3817 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3820 template<
typename T>
3821 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3823 ItemBlock newBlock = {
3824 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3826 m_ItemBlocks.push_back(newBlock);
3829 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3830 newBlock.pItems[i].NextFreeIndex = i + 1;
3831 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3832 return m_ItemBlocks.back();
3838 #if VMA_USE_STL_LIST 3840 #define VmaList std::list 3842 #else // #if VMA_USE_STL_LIST 3844 template<
typename T>
3853 template<
typename T>
3856 VMA_CLASS_NO_COPY(VmaRawList)
3858 typedef VmaListItem<T> ItemType;
3860 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3864 size_t GetCount()
const {
return m_Count; }
3865 bool IsEmpty()
const {
return m_Count == 0; }
3867 ItemType* Front() {
return m_pFront; }
3868 const ItemType* Front()
const {
return m_pFront; }
3869 ItemType* Back() {
return m_pBack; }
3870 const ItemType* Back()
const {
return m_pBack; }
3872 ItemType* PushBack();
3873 ItemType* PushFront();
3874 ItemType* PushBack(
const T& value);
3875 ItemType* PushFront(
const T& value);
3880 ItemType* InsertBefore(ItemType* pItem);
3882 ItemType* InsertAfter(ItemType* pItem);
3884 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3885 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3887 void Remove(ItemType* pItem);
3890 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3891 VmaPoolAllocator<ItemType> m_ItemAllocator;
3897 template<
typename T>
3898 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3899 m_pAllocationCallbacks(pAllocationCallbacks),
3900 m_ItemAllocator(pAllocationCallbacks, 128),
3907 template<
typename T>
3908 VmaRawList<T>::~VmaRawList()
3914 template<
typename T>
3915 void VmaRawList<T>::Clear()
3917 if(IsEmpty() ==
false)
3919 ItemType* pItem = m_pBack;
3920 while(pItem != VMA_NULL)
3922 ItemType*
const pPrevItem = pItem->pPrev;
3923 m_ItemAllocator.Free(pItem);
3926 m_pFront = VMA_NULL;
3932 template<
typename T>
3933 VmaListItem<T>* VmaRawList<T>::PushBack()
3935 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3936 pNewItem->pNext = VMA_NULL;
3939 pNewItem->pPrev = VMA_NULL;
3940 m_pFront = pNewItem;
3946 pNewItem->pPrev = m_pBack;
3947 m_pBack->pNext = pNewItem;
3954 template<
typename T>
3955 VmaListItem<T>* VmaRawList<T>::PushFront()
3957 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3958 pNewItem->pPrev = VMA_NULL;
3961 pNewItem->pNext = VMA_NULL;
3962 m_pFront = pNewItem;
3968 pNewItem->pNext = m_pFront;
3969 m_pFront->pPrev = pNewItem;
3970 m_pFront = pNewItem;
3976 template<
typename T>
3977 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3979 ItemType*
const pNewItem = PushBack();
3980 pNewItem->Value = value;
3984 template<
typename T>
3985 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3987 ItemType*
const pNewItem = PushFront();
3988 pNewItem->Value = value;
3992 template<
typename T>
3993 void VmaRawList<T>::PopBack()
3995 VMA_HEAVY_ASSERT(m_Count > 0);
3996 ItemType*
const pBackItem = m_pBack;
3997 ItemType*
const pPrevItem = pBackItem->pPrev;
3998 if(pPrevItem != VMA_NULL)
4000 pPrevItem->pNext = VMA_NULL;
4002 m_pBack = pPrevItem;
4003 m_ItemAllocator.Free(pBackItem);
4007 template<
typename T>
4008 void VmaRawList<T>::PopFront()
4010 VMA_HEAVY_ASSERT(m_Count > 0);
4011 ItemType*
const pFrontItem = m_pFront;
4012 ItemType*
const pNextItem = pFrontItem->pNext;
4013 if(pNextItem != VMA_NULL)
4015 pNextItem->pPrev = VMA_NULL;
4017 m_pFront = pNextItem;
4018 m_ItemAllocator.Free(pFrontItem);
4022 template<
typename T>
4023 void VmaRawList<T>::Remove(ItemType* pItem)
4025 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4026 VMA_HEAVY_ASSERT(m_Count > 0);
4028 if(pItem->pPrev != VMA_NULL)
4030 pItem->pPrev->pNext = pItem->pNext;
4034 VMA_HEAVY_ASSERT(m_pFront == pItem);
4035 m_pFront = pItem->pNext;
4038 if(pItem->pNext != VMA_NULL)
4040 pItem->pNext->pPrev = pItem->pPrev;
4044 VMA_HEAVY_ASSERT(m_pBack == pItem);
4045 m_pBack = pItem->pPrev;
4048 m_ItemAllocator.Free(pItem);
4052 template<
typename T>
4053 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4055 if(pItem != VMA_NULL)
4057 ItemType*
const prevItem = pItem->pPrev;
4058 ItemType*
const newItem = m_ItemAllocator.Alloc();
4059 newItem->pPrev = prevItem;
4060 newItem->pNext = pItem;
4061 pItem->pPrev = newItem;
4062 if(prevItem != VMA_NULL)
4064 prevItem->pNext = newItem;
4068 VMA_HEAVY_ASSERT(m_pFront == pItem);
4078 template<
typename T>
4079 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4081 if(pItem != VMA_NULL)
4083 ItemType*
const nextItem = pItem->pNext;
4084 ItemType*
const newItem = m_ItemAllocator.Alloc();
4085 newItem->pNext = nextItem;
4086 newItem->pPrev = pItem;
4087 pItem->pNext = newItem;
4088 if(nextItem != VMA_NULL)
4090 nextItem->pPrev = newItem;
4094 VMA_HEAVY_ASSERT(m_pBack == pItem);
4104 template<
typename T>
4105 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4107 ItemType*
const newItem = InsertBefore(pItem);
4108 newItem->Value = value;
4112 template<
typename T>
4113 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4115 ItemType*
const newItem = InsertAfter(pItem);
4116 newItem->Value = value;
4120 template<
typename T,
typename AllocatorT>
4123 VMA_CLASS_NO_COPY(VmaList)
4134 T& operator*()
const 4136 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4137 return m_pItem->Value;
4139 T* operator->()
const 4141 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4142 return &m_pItem->Value;
4145 iterator& operator++()
4147 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4148 m_pItem = m_pItem->pNext;
4151 iterator& operator--()
4153 if(m_pItem != VMA_NULL)
4155 m_pItem = m_pItem->pPrev;
4159 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4160 m_pItem = m_pList->Back();
4165 iterator operator++(
int)
4167 iterator result = *
this;
4171 iterator operator--(
int)
4173 iterator result = *
this;
4178 bool operator==(
const iterator& rhs)
const 4180 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4181 return m_pItem == rhs.m_pItem;
4183 bool operator!=(
const iterator& rhs)
const 4185 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4186 return m_pItem != rhs.m_pItem;
4190 VmaRawList<T>* m_pList;
4191 VmaListItem<T>* m_pItem;
4193 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4199 friend class VmaList<T, AllocatorT>;
4202 class const_iterator
4211 const_iterator(
const iterator& src) :
4212 m_pList(src.m_pList),
4213 m_pItem(src.m_pItem)
4217 const T& operator*()
const 4219 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4220 return m_pItem->Value;
4222 const T* operator->()
const 4224 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4225 return &m_pItem->Value;
4228 const_iterator& operator++()
4230 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4231 m_pItem = m_pItem->pNext;
4234 const_iterator& operator--()
4236 if(m_pItem != VMA_NULL)
4238 m_pItem = m_pItem->pPrev;
4242 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4243 m_pItem = m_pList->Back();
4248 const_iterator operator++(
int)
4250 const_iterator result = *
this;
4254 const_iterator operator--(
int)
4256 const_iterator result = *
this;
4261 bool operator==(
const const_iterator& rhs)
const 4263 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4264 return m_pItem == rhs.m_pItem;
4266 bool operator!=(
const const_iterator& rhs)
const 4268 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4269 return m_pItem != rhs.m_pItem;
4273 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4279 const VmaRawList<T>* m_pList;
4280 const VmaListItem<T>* m_pItem;
4282 friend class VmaList<T, AllocatorT>;
4285 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4287 bool empty()
const {
return m_RawList.IsEmpty(); }
4288 size_t size()
const {
return m_RawList.GetCount(); }
4290 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4291 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4293 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4294 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4296 void clear() { m_RawList.Clear(); }
4297 void push_back(
const T& value) { m_RawList.PushBack(value); }
4298 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4299 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4302 VmaRawList<T> m_RawList;
4305 #endif // #if VMA_USE_STL_LIST 4313 #if VMA_USE_STL_UNORDERED_MAP 4315 #define VmaPair std::pair 4317 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4318 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4320 #else // #if VMA_USE_STL_UNORDERED_MAP 4322 template<
typename T1,
typename T2>
4328 VmaPair() : first(), second() { }
4329 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4335 template<
typename KeyT,
typename ValueT>
4339 typedef VmaPair<KeyT, ValueT> PairType;
4340 typedef PairType* iterator;
4342 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4344 iterator begin() {
return m_Vector.begin(); }
4345 iterator end() {
return m_Vector.end(); }
4347 void insert(
const PairType& pair);
4348 iterator find(
const KeyT& key);
4349 void erase(iterator it);
4352 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4355 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4357 template<
typename FirstT,
typename SecondT>
4358 struct VmaPairFirstLess
4360 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4362 return lhs.first < rhs.first;
4364 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4366 return lhs.first < rhsFirst;
4370 template<
typename KeyT,
typename ValueT>
4371 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4373 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4375 m_Vector.data() + m_Vector.size(),
4377 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4378 VmaVectorInsert(m_Vector, indexToInsert, pair);
4381 template<
typename KeyT,
typename ValueT>
4382 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4384 PairType* it = VmaBinaryFindFirstNotLess(
4386 m_Vector.data() + m_Vector.size(),
4388 VmaPairFirstLess<KeyT, ValueT>());
4389 if((it != m_Vector.end()) && (it->first == key))
4395 return m_Vector.end();
4399 template<
typename KeyT,
typename ValueT>
4400 void VmaMap<KeyT, ValueT>::erase(iterator it)
4402 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4405 #endif // #if VMA_USE_STL_UNORDERED_MAP 4411 class VmaDeviceMemoryBlock;
4413 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4415 struct VmaAllocation_T
4417 VMA_CLASS_NO_COPY(VmaAllocation_T)
4419 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4423 FLAG_USER_DATA_STRING = 0x01,
4427 enum ALLOCATION_TYPE
4429 ALLOCATION_TYPE_NONE,
4430 ALLOCATION_TYPE_BLOCK,
4431 ALLOCATION_TYPE_DEDICATED,
4434 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4437 m_pUserData(VMA_NULL),
4438 m_LastUseFrameIndex(currentFrameIndex),
4439 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4440 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4442 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4444 #if VMA_STATS_STRING_ENABLED 4445 m_CreationFrameIndex = currentFrameIndex;
4446 m_BufferImageUsage = 0;
4452 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4455 VMA_ASSERT(m_pUserData == VMA_NULL);
4458 void InitBlockAllocation(
4460 VmaDeviceMemoryBlock* block,
4461 VkDeviceSize offset,
4462 VkDeviceSize alignment,
4464 VmaSuballocationType suballocationType,
4468 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4469 VMA_ASSERT(block != VMA_NULL);
4470 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4471 m_Alignment = alignment;
4473 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4474 m_SuballocationType = (uint8_t)suballocationType;
4475 m_BlockAllocation.m_hPool = hPool;
4476 m_BlockAllocation.m_Block = block;
4477 m_BlockAllocation.m_Offset = offset;
4478 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4483 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4484 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4485 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4486 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4487 m_BlockAllocation.m_Block = VMA_NULL;
4488 m_BlockAllocation.m_Offset = 0;
4489 m_BlockAllocation.m_CanBecomeLost =
true;
4492 void ChangeBlockAllocation(
4494 VmaDeviceMemoryBlock* block,
4495 VkDeviceSize offset);
4498 void InitDedicatedAllocation(
4499 uint32_t memoryTypeIndex,
4500 VkDeviceMemory hMemory,
4501 VmaSuballocationType suballocationType,
4505 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4506 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4507 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4510 m_SuballocationType = (uint8_t)suballocationType;
4511 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4512 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4513 m_DedicatedAllocation.m_hMemory = hMemory;
4514 m_DedicatedAllocation.m_pMappedData = pMappedData;
4517 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4518 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4519 VkDeviceSize GetSize()
const {
return m_Size; }
4520 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4521 void* GetUserData()
const {
return m_pUserData; }
4522 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4523 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4525 VmaDeviceMemoryBlock* GetBlock()
const 4527 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4528 return m_BlockAllocation.m_Block;
4530 VkDeviceSize GetOffset()
const;
4531 VkDeviceMemory GetMemory()
const;
4532 uint32_t GetMemoryTypeIndex()
const;
4533 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4534 void* GetMappedData()
const;
4535 bool CanBecomeLost()
const;
4538 uint32_t GetLastUseFrameIndex()
const 4540 return m_LastUseFrameIndex.load();
4542 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4544 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4554 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4556 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4558 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4569 void BlockAllocMap();
4570 void BlockAllocUnmap();
4571 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4574 #if VMA_STATS_STRING_ENABLED 4575 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4576 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4578 void InitBufferImageUsage(uint32_t bufferImageUsage)
4580 VMA_ASSERT(m_BufferImageUsage == 0);
4581 m_BufferImageUsage = bufferImageUsage;
4584 void PrintParameters(
class VmaJsonWriter& json)
const;
4588 VkDeviceSize m_Alignment;
4589 VkDeviceSize m_Size;
4591 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4593 uint8_t m_SuballocationType;
4600 struct BlockAllocation
4603 VmaDeviceMemoryBlock* m_Block;
4604 VkDeviceSize m_Offset;
4605 bool m_CanBecomeLost;
4609 struct DedicatedAllocation
4611 uint32_t m_MemoryTypeIndex;
4612 VkDeviceMemory m_hMemory;
4613 void* m_pMappedData;
4619 BlockAllocation m_BlockAllocation;
4621 DedicatedAllocation m_DedicatedAllocation;
4624 #if VMA_STATS_STRING_ENABLED 4625 uint32_t m_CreationFrameIndex;
4626 uint32_t m_BufferImageUsage;
4636 struct VmaSuballocation
4638 VkDeviceSize offset;
4641 VmaSuballocationType type;
4645 struct VmaSuballocationOffsetLess
4647 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4649 return lhs.offset < rhs.offset;
4652 struct VmaSuballocationOffsetGreater
4654 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4656 return lhs.offset > rhs.offset;
4660 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4663 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4678 struct VmaAllocationRequest
4680 VkDeviceSize offset;
4681 VkDeviceSize sumFreeSize;
4682 VkDeviceSize sumItemSize;
4683 VmaSuballocationList::iterator item;
4684 size_t itemsToMakeLostCount;
4687 VkDeviceSize CalcCost()
const 4689 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4697 class VmaBlockMetadata
4701 virtual ~VmaBlockMetadata() { }
4702 virtual void Init(VkDeviceSize size) { m_Size = size; }
4705 virtual bool Validate()
const = 0;
4706 VkDeviceSize GetSize()
const {
return m_Size; }
4707 virtual size_t GetAllocationCount()
const = 0;
4708 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4709 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4711 virtual bool IsEmpty()
const = 0;
4713 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4715 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4717 #if VMA_STATS_STRING_ENABLED 4718 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4724 virtual bool CreateAllocationRequest(
4725 uint32_t currentFrameIndex,
4726 uint32_t frameInUseCount,
4727 VkDeviceSize bufferImageGranularity,
4728 VkDeviceSize allocSize,
4729 VkDeviceSize allocAlignment,
4731 VmaSuballocationType allocType,
4732 bool canMakeOtherLost,
4734 VmaAllocationRequest* pAllocationRequest) = 0;
4736 virtual bool MakeRequestedAllocationsLost(
4737 uint32_t currentFrameIndex,
4738 uint32_t frameInUseCount,
4739 VmaAllocationRequest* pAllocationRequest) = 0;
4741 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4743 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4747 const VmaAllocationRequest& request,
4748 VmaSuballocationType type,
4749 VkDeviceSize allocSize,
4755 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4758 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4760 #if VMA_STATS_STRING_ENABLED 4761 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4762 VkDeviceSize unusedBytes,
4763 size_t allocationCount,
4764 size_t unusedRangeCount)
const;
4765 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4766 VkDeviceSize offset,
4768 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4769 VkDeviceSize offset,
4770 VkDeviceSize size)
const;
4771 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4775 VkDeviceSize m_Size;
4776 const VkAllocationCallbacks* m_pAllocationCallbacks;
4779 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4780 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4784 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4786 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4789 virtual ~VmaBlockMetadata_Generic();
4790 virtual void Init(VkDeviceSize size);
4792 virtual bool Validate()
const;
4793 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4794 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4795 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4796 virtual bool IsEmpty()
const;
4798 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4799 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4801 #if VMA_STATS_STRING_ENABLED 4802 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4805 virtual bool CreateAllocationRequest(
4806 uint32_t currentFrameIndex,
4807 uint32_t frameInUseCount,
4808 VkDeviceSize bufferImageGranularity,
4809 VkDeviceSize allocSize,
4810 VkDeviceSize allocAlignment,
4812 VmaSuballocationType allocType,
4813 bool canMakeOtherLost,
4815 VmaAllocationRequest* pAllocationRequest);
4817 virtual bool MakeRequestedAllocationsLost(
4818 uint32_t currentFrameIndex,
4819 uint32_t frameInUseCount,
4820 VmaAllocationRequest* pAllocationRequest);
4822 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4824 virtual VkResult CheckCorruption(
const void* pBlockData);
4827 const VmaAllocationRequest& request,
4828 VmaSuballocationType type,
4829 VkDeviceSize allocSize,
4834 virtual void FreeAtOffset(VkDeviceSize offset);
4837 uint32_t m_FreeCount;
4838 VkDeviceSize m_SumFreeSize;
4839 VmaSuballocationList m_Suballocations;
4842 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4844 bool ValidateFreeSuballocationList()
const;
4848 bool CheckAllocation(
4849 uint32_t currentFrameIndex,
4850 uint32_t frameInUseCount,
4851 VkDeviceSize bufferImageGranularity,
4852 VkDeviceSize allocSize,
4853 VkDeviceSize allocAlignment,
4854 VmaSuballocationType allocType,
4855 VmaSuballocationList::const_iterator suballocItem,
4856 bool canMakeOtherLost,
4857 VkDeviceSize* pOffset,
4858 size_t* itemsToMakeLostCount,
4859 VkDeviceSize* pSumFreeSize,
4860 VkDeviceSize* pSumItemSize)
const;
4862 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4866 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4869 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4872 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4953 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4955 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4958 virtual ~VmaBlockMetadata_Linear();
4959 virtual void Init(VkDeviceSize size);
4961 virtual bool Validate()
const;
4962 virtual size_t GetAllocationCount()
const;
4963 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4964 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4965 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4967 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4968 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4970 #if VMA_STATS_STRING_ENABLED 4971 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4974 virtual bool CreateAllocationRequest(
4975 uint32_t currentFrameIndex,
4976 uint32_t frameInUseCount,
4977 VkDeviceSize bufferImageGranularity,
4978 VkDeviceSize allocSize,
4979 VkDeviceSize allocAlignment,
4981 VmaSuballocationType allocType,
4982 bool canMakeOtherLost,
4984 VmaAllocationRequest* pAllocationRequest);
4986 virtual bool MakeRequestedAllocationsLost(
4987 uint32_t currentFrameIndex,
4988 uint32_t frameInUseCount,
4989 VmaAllocationRequest* pAllocationRequest);
4991 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4993 virtual VkResult CheckCorruption(
const void* pBlockData);
4996 const VmaAllocationRequest& request,
4997 VmaSuballocationType type,
4998 VkDeviceSize allocSize,
5003 virtual void FreeAtOffset(VkDeviceSize offset);
5013 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5015 enum SECOND_VECTOR_MODE
5017 SECOND_VECTOR_EMPTY,
5022 SECOND_VECTOR_RING_BUFFER,
5028 SECOND_VECTOR_DOUBLE_STACK,
5031 VkDeviceSize m_SumFreeSize;
5032 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5033 uint32_t m_1stVectorIndex;
5034 SECOND_VECTOR_MODE m_2ndVectorMode;
5036 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5037 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5038 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5039 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5042 size_t m_1stNullItemsBeginCount;
5044 size_t m_1stNullItemsMiddleCount;
5046 size_t m_2ndNullItemsCount;
5048 bool ShouldCompact1st()
const;
5049 void CleanupAfterFree();
5063 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5065 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5068 virtual ~VmaBlockMetadata_Buddy();
5069 virtual void Init(VkDeviceSize size);
5071 virtual bool Validate()
const;
5072 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5073 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5074 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5075 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5077 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5078 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5080 #if VMA_STATS_STRING_ENABLED 5081 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5084 virtual bool CreateAllocationRequest(
5085 uint32_t currentFrameIndex,
5086 uint32_t frameInUseCount,
5087 VkDeviceSize bufferImageGranularity,
5088 VkDeviceSize allocSize,
5089 VkDeviceSize allocAlignment,
5091 VmaSuballocationType allocType,
5092 bool canMakeOtherLost,
5094 VmaAllocationRequest* pAllocationRequest);
5096 virtual bool MakeRequestedAllocationsLost(
5097 uint32_t currentFrameIndex,
5098 uint32_t frameInUseCount,
5099 VmaAllocationRequest* pAllocationRequest);
5101 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5103 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5106 const VmaAllocationRequest& request,
5107 VmaSuballocationType type,
5108 VkDeviceSize allocSize,
5112 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5113 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5116 static const VkDeviceSize MIN_NODE_SIZE = 32;
5117 static const size_t MAX_LEVELS = 30;
5119 struct ValidationContext
5121 size_t calculatedAllocationCount;
5122 size_t calculatedFreeCount;
5123 VkDeviceSize calculatedSumFreeSize;
5125 ValidationContext() :
5126 calculatedAllocationCount(0),
5127 calculatedFreeCount(0),
5128 calculatedSumFreeSize(0) { }
5133 VkDeviceSize offset;
5163 VkDeviceSize m_UsableSize;
5164 uint32_t m_LevelCount;
5170 } m_FreeList[MAX_LEVELS];
5172 size_t m_AllocationCount;
5176 VkDeviceSize m_SumFreeSize;
5178 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5179 void DeleteNode(Node* node);
5180 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5181 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5182 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5184 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5185 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5189 void AddToFreeListFront(uint32_t level, Node* node);
5193 void RemoveFromFreeList(uint32_t level, Node* node);
5195 #if VMA_STATS_STRING_ENABLED 5196 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5206 class VmaDeviceMemoryBlock
5208 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5210 VmaBlockMetadata* m_pMetadata;
5214 ~VmaDeviceMemoryBlock()
5216 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5217 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5223 uint32_t newMemoryTypeIndex,
5224 VkDeviceMemory newMemory,
5225 VkDeviceSize newSize,
5227 uint32_t algorithm);
5231 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5232 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5233 uint32_t GetId()
const {
return m_Id; }
5234 void* GetMappedData()
const {
return m_pMappedData; }
5237 bool Validate()
const;
5242 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5245 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5246 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5248 VkResult BindBufferMemory(
5252 VkResult BindImageMemory(
5258 uint32_t m_MemoryTypeIndex;
5260 VkDeviceMemory m_hMemory;
5265 uint32_t m_MapCount;
5266 void* m_pMappedData;
5269 struct VmaPointerLess
5271 bool operator()(
const void* lhs,
const void* rhs)
const 5277 class VmaDefragmentator;
5285 struct VmaBlockVector
5287 VMA_CLASS_NO_COPY(VmaBlockVector)
5291 uint32_t memoryTypeIndex,
5292 VkDeviceSize preferredBlockSize,
5293 size_t minBlockCount,
5294 size_t maxBlockCount,
5295 VkDeviceSize bufferImageGranularity,
5296 uint32_t frameInUseCount,
5298 bool explicitBlockSize,
5299 uint32_t algorithm);
5302 VkResult CreateMinBlocks();
5304 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5305 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5306 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5307 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5308 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5312 bool IsEmpty()
const {
return m_Blocks.empty(); }
5313 bool IsCorruptionDetectionEnabled()
const;
5317 uint32_t currentFrameIndex,
5319 VkDeviceSize alignment,
5321 VmaSuballocationType suballocType,
5330 #if VMA_STATS_STRING_ENABLED 5331 void PrintDetailedMap(
class VmaJsonWriter& json);
5334 void MakePoolAllocationsLost(
5335 uint32_t currentFrameIndex,
5336 size_t* pLostAllocationCount);
5337 VkResult CheckCorruption();
5339 VmaDefragmentator* EnsureDefragmentator(
5341 uint32_t currentFrameIndex);
5343 VkResult Defragment(
5345 VkDeviceSize& maxBytesToMove,
5346 uint32_t& maxAllocationsToMove);
5348 void DestroyDefragmentator();
5351 friend class VmaDefragmentator;
5354 const uint32_t m_MemoryTypeIndex;
5355 const VkDeviceSize m_PreferredBlockSize;
5356 const size_t m_MinBlockCount;
5357 const size_t m_MaxBlockCount;
5358 const VkDeviceSize m_BufferImageGranularity;
5359 const uint32_t m_FrameInUseCount;
5360 const bool m_IsCustomPool;
5361 const bool m_ExplicitBlockSize;
5362 const uint32_t m_Algorithm;
5363 bool m_HasEmptyBlock;
5366 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5370 VmaDefragmentator* m_pDefragmentator;
5371 uint32_t m_NextBlockId;
5373 VkDeviceSize CalcMaxBlockSize()
const;
5376 void Remove(VmaDeviceMemoryBlock* pBlock);
5380 void IncrementallySortBlocks();
5383 VkResult AllocateFromBlock(
5384 VmaDeviceMemoryBlock* pBlock,
5386 uint32_t currentFrameIndex,
5388 VkDeviceSize alignment,
5391 VmaSuballocationType suballocType,
5395 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5400 VMA_CLASS_NO_COPY(VmaPool_T)
5402 VmaBlockVector m_BlockVector;
5407 VkDeviceSize preferredBlockSize);
5410 uint32_t GetId()
const {
return m_Id; }
5411 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5413 #if VMA_STATS_STRING_ENABLED 5421 class VmaDefragmentator
5423 VMA_CLASS_NO_COPY(VmaDefragmentator)
5426 VmaBlockVector*
const m_pBlockVector;
5427 uint32_t m_CurrentFrameIndex;
5428 VkDeviceSize m_BytesMoved;
5429 uint32_t m_AllocationsMoved;
5431 struct AllocationInfo
5434 VkBool32* m_pChanged;
5437 m_hAllocation(VK_NULL_HANDLE),
5438 m_pChanged(VMA_NULL)
5443 struct AllocationInfoSizeGreater
5445 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5447 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5452 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5456 VmaDeviceMemoryBlock* m_pBlock;
5457 bool m_HasNonMovableAllocations;
5458 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5460 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5462 m_HasNonMovableAllocations(true),
5463 m_Allocations(pAllocationCallbacks),
5464 m_pMappedDataForDefragmentation(VMA_NULL)
5468 void CalcHasNonMovableAllocations()
5470 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5471 const size_t defragmentAllocCount = m_Allocations.size();
5472 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5475 void SortAllocationsBySizeDescecnding()
5477 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5480 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5485 void* m_pMappedDataForDefragmentation;
5488 struct BlockPointerLess
5490 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5492 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5494 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5496 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5502 struct BlockInfoCompareMoveDestination
5504 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5506 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5510 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5514 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5522 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5523 BlockInfoVector m_Blocks;
5525 VkResult DefragmentRound(
5526 VkDeviceSize maxBytesToMove,
5527 uint32_t maxAllocationsToMove);
5529 static bool MoveMakesSense(
5530 size_t dstBlockIndex, VkDeviceSize dstOffset,
5531 size_t srcBlockIndex, VkDeviceSize srcOffset);
5536 VmaBlockVector* pBlockVector,
5537 uint32_t currentFrameIndex);
5539 ~VmaDefragmentator();
5541 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5542 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5544 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5546 VkResult Defragment(
5547 VkDeviceSize maxBytesToMove,
5548 uint32_t maxAllocationsToMove);
5551 #if VMA_RECORDING_ENABLED 5558 void WriteConfiguration(
5559 const VkPhysicalDeviceProperties& devProps,
5560 const VkPhysicalDeviceMemoryProperties& memProps,
5561 bool dedicatedAllocationExtensionEnabled);
5564 void RecordCreateAllocator(uint32_t frameIndex);
5565 void RecordDestroyAllocator(uint32_t frameIndex);
5566 void RecordCreatePool(uint32_t frameIndex,
5569 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5570 void RecordAllocateMemory(uint32_t frameIndex,
5571 const VkMemoryRequirements& vkMemReq,
5574 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5575 const VkMemoryRequirements& vkMemReq,
5576 bool requiresDedicatedAllocation,
5577 bool prefersDedicatedAllocation,
5580 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5581 const VkMemoryRequirements& vkMemReq,
5582 bool requiresDedicatedAllocation,
5583 bool prefersDedicatedAllocation,
5586 void RecordFreeMemory(uint32_t frameIndex,
5588 void RecordSetAllocationUserData(uint32_t frameIndex,
5590 const void* pUserData);
5591 void RecordCreateLostAllocation(uint32_t frameIndex,
5593 void RecordMapMemory(uint32_t frameIndex,
5595 void RecordUnmapMemory(uint32_t frameIndex,
5597 void RecordFlushAllocation(uint32_t frameIndex,
5598 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5599 void RecordInvalidateAllocation(uint32_t frameIndex,
5600 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5601 void RecordCreateBuffer(uint32_t frameIndex,
5602 const VkBufferCreateInfo& bufCreateInfo,
5605 void RecordCreateImage(uint32_t frameIndex,
5606 const VkImageCreateInfo& imageCreateInfo,
5609 void RecordDestroyBuffer(uint32_t frameIndex,
5611 void RecordDestroyImage(uint32_t frameIndex,
5613 void RecordTouchAllocation(uint32_t frameIndex,
5615 void RecordGetAllocationInfo(uint32_t frameIndex,
5617 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5627 class UserDataString
5631 const char* GetString()
const {
return m_Str; }
5641 VMA_MUTEX m_FileMutex;
5643 int64_t m_StartCounter;
5645 void GetBasicParams(CallParams& outParams);
5649 #endif // #if VMA_RECORDING_ENABLED 5652 struct VmaAllocator_T
5654 VMA_CLASS_NO_COPY(VmaAllocator_T)
5657 bool m_UseKhrDedicatedAllocation;
5659 bool m_AllocationCallbacksSpecified;
5660 VkAllocationCallbacks m_AllocationCallbacks;
5664 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5665 VMA_MUTEX m_HeapSizeLimitMutex;
5667 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5668 VkPhysicalDeviceMemoryProperties m_MemProps;
5671 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5674 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5675 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5676 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5682 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5684 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5688 return m_VulkanFunctions;
5691 VkDeviceSize GetBufferImageGranularity()
const 5694 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5695 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5698 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5699 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5701 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5703 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5704 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5707 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5709 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5710 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5713 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5715 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5716 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5717 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5720 bool IsIntegratedGpu()
const 5722 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5725 #if VMA_RECORDING_ENABLED 5726 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5729 void GetBufferMemoryRequirements(
5731 VkMemoryRequirements& memReq,
5732 bool& requiresDedicatedAllocation,
5733 bool& prefersDedicatedAllocation)
const;
5734 void GetImageMemoryRequirements(
5736 VkMemoryRequirements& memReq,
5737 bool& requiresDedicatedAllocation,
5738 bool& prefersDedicatedAllocation)
const;
5741 VkResult AllocateMemory(
5742 const VkMemoryRequirements& vkMemReq,
5743 bool requiresDedicatedAllocation,
5744 bool prefersDedicatedAllocation,
5745 VkBuffer dedicatedBuffer,
5746 VkImage dedicatedImage,
5748 VmaSuballocationType suballocType,
5754 void CalculateStats(
VmaStats* pStats);
5756 #if VMA_STATS_STRING_ENABLED 5757 void PrintDetailedMap(
class VmaJsonWriter& json);
5760 VkResult Defragment(
5762 size_t allocationCount,
5763 VkBool32* pAllocationsChanged,
5771 void DestroyPool(
VmaPool pool);
5774 void SetCurrentFrameIndex(uint32_t frameIndex);
5775 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5777 void MakePoolAllocationsLost(
5779 size_t* pLostAllocationCount);
5780 VkResult CheckPoolCorruption(
VmaPool hPool);
5781 VkResult CheckCorruption(uint32_t memoryTypeBits);
5785 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5786 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5791 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5792 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5794 void FlushOrInvalidateAllocation(
5796 VkDeviceSize offset, VkDeviceSize size,
5797 VMA_CACHE_OPERATION op);
5799 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5802 VkDeviceSize m_PreferredLargeHeapBlockSize;
5804 VkPhysicalDevice m_PhysicalDevice;
5805 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5807 VMA_MUTEX m_PoolsMutex;
5809 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5810 uint32_t m_NextPoolId;
5814 #if VMA_RECORDING_ENABLED 5815 VmaRecorder* m_pRecorder;
5820 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5822 VkResult AllocateMemoryOfType(
5824 VkDeviceSize alignment,
5825 bool dedicatedAllocation,
5826 VkBuffer dedicatedBuffer,
5827 VkImage dedicatedImage,
5829 uint32_t memTypeIndex,
5830 VmaSuballocationType suballocType,
5834 VkResult AllocateDedicatedMemory(
5836 VmaSuballocationType suballocType,
5837 uint32_t memTypeIndex,
5839 bool isUserDataString,
5841 VkBuffer dedicatedBuffer,
5842 VkImage dedicatedImage,
5852 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5854 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5857 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5859 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5862 template<
typename T>
5865 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5868 template<
typename T>
5869 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5871 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5874 template<
typename T>
5875 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5880 VmaFree(hAllocator, ptr);
5884 template<
typename T>
5885 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5889 for(
size_t i = count; i--; )
5891 VmaFree(hAllocator, ptr);
5898 #if VMA_STATS_STRING_ENABLED 5900 class VmaStringBuilder
5903 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5904 size_t GetLength()
const {
return m_Data.size(); }
5905 const char* GetData()
const {
return m_Data.data(); }
5907 void Add(
char ch) { m_Data.push_back(ch); }
5908 void Add(
const char* pStr);
5909 void AddNewLine() { Add(
'\n'); }
5910 void AddNumber(uint32_t num);
5911 void AddNumber(uint64_t num);
5912 void AddPointer(
const void* ptr);
5915 VmaVector< char, VmaStlAllocator<char> > m_Data;
5918 void VmaStringBuilder::Add(
const char* pStr)
5920 const size_t strLen = strlen(pStr);
5923 const size_t oldCount = m_Data.size();
5924 m_Data.resize(oldCount + strLen);
5925 memcpy(m_Data.data() + oldCount, pStr, strLen);
5929 void VmaStringBuilder::AddNumber(uint32_t num)
5932 VmaUint32ToStr(buf,
sizeof(buf), num);
5936 void VmaStringBuilder::AddNumber(uint64_t num)
5939 VmaUint64ToStr(buf,
sizeof(buf), num);
5943 void VmaStringBuilder::AddPointer(
const void* ptr)
5946 VmaPtrToStr(buf,
sizeof(buf), ptr);
5950 #endif // #if VMA_STATS_STRING_ENABLED 5955 #if VMA_STATS_STRING_ENABLED 5959 VMA_CLASS_NO_COPY(VmaJsonWriter)
5961 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5964 void BeginObject(
bool singleLine =
false);
5967 void BeginArray(
bool singleLine =
false);
5970 void WriteString(
const char* pStr);
5971 void BeginString(
const char* pStr = VMA_NULL);
5972 void ContinueString(
const char* pStr);
5973 void ContinueString(uint32_t n);
5974 void ContinueString(uint64_t n);
5975 void ContinueString_Pointer(
const void* ptr);
5976 void EndString(
const char* pStr = VMA_NULL);
5978 void WriteNumber(uint32_t n);
5979 void WriteNumber(uint64_t n);
5980 void WriteBool(
bool b);
5984 static const char*
const INDENT;
5986 enum COLLECTION_TYPE
5988 COLLECTION_TYPE_OBJECT,
5989 COLLECTION_TYPE_ARRAY,
5993 COLLECTION_TYPE type;
5994 uint32_t valueCount;
5995 bool singleLineMode;
5998 VmaStringBuilder& m_SB;
5999 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6000 bool m_InsideString;
6002 void BeginValue(
bool isString);
6003 void WriteIndent(
bool oneLess =
false);
6006 const char*
const VmaJsonWriter::INDENT =
" ";
// Constructor: wires the collection stack to the caller-provided CPU
// allocation callbacks and starts outside of any string.
// NOTE(review): the m_SB member initializer is not visible in this extract.
6008 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6010 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6011 m_InsideString(false)
// Destructor: the writer must be "finished" -- no string left open and no
// unclosed object/array remaining on the stack.
6015 VmaJsonWriter::~VmaJsonWriter()
6017 VMA_ASSERT(!m_InsideString);
6018 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object and pushes a StackItem describing it; singleLine
// suppresses per-value indentation for this collection.
6021 void VmaJsonWriter::BeginObject(
bool singleLine)
6023 VMA_ASSERT(!m_InsideString);
6029 item.type = COLLECTION_TYPE_OBJECT;
6030 item.valueCount = 0;
6031 item.singleLineMode = singleLine;
6032 m_Stack.push_back(item);
// Closes the innermost collection, which must be an object.
6035 void VmaJsonWriter::EndObject()
6037 VMA_ASSERT(!m_InsideString);
6042 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array; mirrors BeginObject().
6046 void VmaJsonWriter::BeginArray(
bool singleLine)
6048 VMA_ASSERT(!m_InsideString);
6054 item.type = COLLECTION_TYPE_ARRAY;
6055 item.valueCount = 0;
6056 item.singleLineMode = singleLine;
6057 m_Stack.push_back(item);
// Closes the innermost collection, which must be an array.
6060 void VmaJsonWriter::EndArray()
6062 VMA_ASSERT(!m_InsideString);
6067 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
6071 void VmaJsonWriter::WriteString(
const char* pStr)
6077 void VmaJsonWriter::BeginString(
const char* pStr)
6079 VMA_ASSERT(!m_InsideString);
6083 m_InsideString =
true;
6084 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6086 ContinueString(pStr);
6090 void VmaJsonWriter::ContinueString(
const char* pStr)
6092 VMA_ASSERT(m_InsideString);
6094 const size_t strLen = strlen(pStr);
6095 for(
size_t i = 0; i < strLen; ++i)
6128 VMA_ASSERT(0 &&
"Character not currently supported.");
6134 void VmaJsonWriter::ContinueString(uint32_t n)
6136 VMA_ASSERT(m_InsideString);
6140 void VmaJsonWriter::ContinueString(uint64_t n)
6142 VMA_ASSERT(m_InsideString);
6146 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6148 VMA_ASSERT(m_InsideString);
6149 m_SB.AddPointer(ptr);
6152 void VmaJsonWriter::EndString(
const char* pStr)
6154 VMA_ASSERT(m_InsideString);
6155 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6157 ContinueString(pStr);
6160 m_InsideString =
false;
6163 void VmaJsonWriter::WriteNumber(uint32_t n)
6165 VMA_ASSERT(!m_InsideString);
6170 void VmaJsonWriter::WriteNumber(uint64_t n)
6172 VMA_ASSERT(!m_InsideString);
6177 void VmaJsonWriter::WriteBool(
bool b)
6179 VMA_ASSERT(!m_InsideString);
6181 m_SB.Add(b ?
"true" :
"false");
6184 void VmaJsonWriter::WriteNull()
6186 VMA_ASSERT(!m_InsideString);
6191 void VmaJsonWriter::BeginValue(
bool isString)
6193 if(!m_Stack.empty())
6195 StackItem& currItem = m_Stack.back();
6196 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6197 currItem.valueCount % 2 == 0)
6199 VMA_ASSERT(isString);
6202 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6203 currItem.valueCount % 2 != 0)
6207 else if(currItem.valueCount > 0)
6216 ++currItem.valueCount;
6220 void VmaJsonWriter::WriteIndent(
bool oneLess)
6222 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6226 size_t count = m_Stack.size();
6227 if(count > 0 && oneLess)
6231 for(
size_t i = 0; i < count; ++i)
// Attaches user data to the allocation. In "user data is string" mode the
// previous copy is freed and the new string is deep-copied (including the
// NUL terminator) via the allocator's CPU callbacks; otherwise the raw
// pointer is stored as-is.
6238 #endif // #if VMA_STATS_STRING_ENABLED 6242 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6244 if(IsUserDataString())
// Passing the currently stored string pointer back in would become a
// use-after-free once FreeUserDataString() runs below.
6246 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6248 FreeUserDataString(hAllocator);
6250 if(pUserData != VMA_NULL)
6252 const char*
const newStrSrc = (
char*)pUserData;
6253 const size_t newStrLen = strlen(newStrSrc);
6254 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6255 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6256 m_pUserData = newStrDst;
6261 m_pUserData = pUserData;
6265 void VmaAllocation_T::ChangeBlockAllocation(
6267 VmaDeviceMemoryBlock* block,
6268 VkDeviceSize offset)
6270 VMA_ASSERT(block != VMA_NULL);
6271 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6274 if(block != m_BlockAllocation.m_Block)
6276 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6277 if(IsPersistentMap())
6279 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6280 block->Map(hAllocator, mapRefCount, VMA_NULL);
6283 m_BlockAllocation.m_Block = block;
6284 m_BlockAllocation.m_Offset = offset;
6287 VkDeviceSize VmaAllocation_T::GetOffset()
const 6291 case ALLOCATION_TYPE_BLOCK:
6292 return m_BlockAllocation.m_Offset;
6293 case ALLOCATION_TYPE_DEDICATED:
6301 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6305 case ALLOCATION_TYPE_BLOCK:
6306 return m_BlockAllocation.m_Block->GetDeviceMemory();
6307 case ALLOCATION_TYPE_DEDICATED:
6308 return m_DedicatedAllocation.m_hMemory;
6311 return VK_NULL_HANDLE;
6315 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6319 case ALLOCATION_TYPE_BLOCK:
6320 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6321 case ALLOCATION_TYPE_DEDICATED:
6322 return m_DedicatedAllocation.m_MemoryTypeIndex;
6329 void* VmaAllocation_T::GetMappedData()
const 6333 case ALLOCATION_TYPE_BLOCK:
6336 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6337 VMA_ASSERT(pBlockData != VMA_NULL);
6338 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6345 case ALLOCATION_TYPE_DEDICATED:
6346 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6347 return m_DedicatedAllocation.m_pMappedData;
6354 bool VmaAllocation_T::CanBecomeLost()
const 6358 case ALLOCATION_TYPE_BLOCK:
6359 return m_BlockAllocation.m_CanBecomeLost;
6360 case ALLOCATION_TYPE_DEDICATED:
6368 VmaPool VmaAllocation_T::GetPool()
const 6370 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6371 return m_BlockAllocation.m_hPool;
// Atomically marks the allocation as lost by compare-exchanging its last-use
// frame index to VMA_FRAME_INDEX_LOST. Visible cases: already lost; still in
// use within frameInUseCount frames of currentFrameIndex; otherwise attempt
// the CAS.
// NOTE(review): the surrounding retry loop and the return statements are not
// visible in this extract -- confirm against the upstream file.
6374 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6376 VMA_ASSERT(CanBecomeLost());
6382 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6385 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6390 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6396 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
6406 #if VMA_STATS_STRING_ENABLED 6409 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
6418 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6420 json.WriteString(
"Type");
6421 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6423 json.WriteString(
"Size");
6424 json.WriteNumber(m_Size);
6426 if(m_pUserData != VMA_NULL)
6428 json.WriteString(
"UserData");
6429 if(IsUserDataString())
6431 json.WriteString((
const char*)m_pUserData);
6436 json.ContinueString_Pointer(m_pUserData);
6441 json.WriteString(
"CreationFrameIndex");
6442 json.WriteNumber(m_CreationFrameIndex);
6444 json.WriteString(
"LastUseFrameIndex");
6445 json.WriteNumber(GetLastUseFrameIndex());
6447 if(m_BufferImageUsage != 0)
6449 json.WriteString(
"Usage");
6450 json.WriteNumber(m_BufferImageUsage);
// Releases the deep-copied user-data string (allocated in SetUserData) and
// resets the pointer. Only valid in "user data is string" mode.
6456 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6458 VMA_ASSERT(IsUserDataString());
6459 if(m_pUserData != VMA_NULL)
6461 char*
const oldStr = (
char*)m_pUserData;
6462 const size_t oldStrLen = strlen(oldStr);
// +1 matches the NUL terminator included by vma_new_array in SetUserData.
6463 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6464 m_pUserData = VMA_NULL;
6468 void VmaAllocation_T::BlockAllocMap()
6470 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6472 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6478 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
6482 void VmaAllocation_T::BlockAllocUnmap()
6484 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6486 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6492 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated (non-block) allocation. When already mapped, the cached
// pointer is returned while the map reference count (low 7 bits of
// m_MapCount) stays below its 0x7F saturation limit; exceeding it fails with
// VK_ERROR_MEMORY_MAP_FAILED. Otherwise vkMapMemory is invoked through the
// allocator's function-pointer table and the mapping is cached.
// NOTE(review): the outer "already mapped?" check and the reference-count
// increment are not visible in this extract -- confirm against upstream.
6496 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6498 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6502 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6504 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6505 *ppData = m_DedicatedAllocation.m_pMappedData;
6511 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6512 return VK_ERROR_MEMORY_MAP_FAILED;
6517 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6518 hAllocator->m_hDevice,
6519 m_DedicatedAllocation.m_hMemory,
6524 if(result == VK_SUCCESS)
6526 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation: when the map reference count is non-zero it
// is released and, once the mapping is no longer referenced, the cached
// pointer is cleared and vkUnmapMemory is called. Asserts if the allocation
// was not previously mapped.
// NOTE(review): the decrement of m_MapCount and the zero-count condition
// guarding the vkUnmapMemory call are not visible in this extract.
6533 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6535 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6537 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6542 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6543 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6544 hAllocator->m_hDevice,
6545 m_DedicatedAllocation.m_hMemory);
6550 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
6554 #if VMA_STATS_STRING_ENABLED 6556 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6560 json.WriteString(
"Blocks");
6563 json.WriteString(
"Allocations");
6566 json.WriteString(
"UnusedRanges");
6569 json.WriteString(
"UsedBytes");
6572 json.WriteString(
"UnusedBytes");
6577 json.WriteString(
"AllocationSize");
6578 json.BeginObject(
true);
6579 json.WriteString(
"Min");
6581 json.WriteString(
"Avg");
6583 json.WriteString(
"Max");
6590 json.WriteString(
"UnusedRangeSize");
6591 json.BeginObject(
true);
6592 json.WriteString(
"Min");
6594 json.WriteString(
"Avg");
6596 json.WriteString(
"Max");
6604 #endif // #if VMA_STATS_STRING_ENABLED 6606 struct VmaSuballocationItemSizeLess
6609 const VmaSuballocationList::iterator lhs,
6610 const VmaSuballocationList::iterator rhs)
const 6612 return lhs->size < rhs->size;
6615 const VmaSuballocationList::iterator lhs,
6616 VkDeviceSize rhsSize)
const 6618 return lhs->size < rhsSize;
6626 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6628 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
6632 #if VMA_STATS_STRING_ENABLED 6634 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6635 VkDeviceSize unusedBytes,
6636 size_t allocationCount,
6637 size_t unusedRangeCount)
const 6641 json.WriteString(
"TotalBytes");
6642 json.WriteNumber(GetSize());
6644 json.WriteString(
"UnusedBytes");
6645 json.WriteNumber(unusedBytes);
6647 json.WriteString(
"Allocations");
6648 json.WriteNumber((uint64_t)allocationCount);
6650 json.WriteString(
"UnusedRanges");
6651 json.WriteNumber((uint64_t)unusedRangeCount);
6653 json.WriteString(
"Suballocations");
6657 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6658 VkDeviceSize offset,
6661 json.BeginObject(
true);
6663 json.WriteString(
"Offset");
6664 json.WriteNumber(offset);
6666 hAllocation->PrintParameters(json);
6671 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6672 VkDeviceSize offset,
6673 VkDeviceSize size)
const 6675 json.BeginObject(
true);
6677 json.WriteString(
"Offset");
6678 json.WriteNumber(offset);
6680 json.WriteString(
"Type");
6681 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6683 json.WriteString(
"Size");
6684 json.WriteNumber(size);
6689 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6695 #endif // #if VMA_STATS_STRING_ENABLED 6700 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6701 VmaBlockMetadata(hAllocator),
6704 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6705 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6709 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
6713 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6715 VmaBlockMetadata::Init(size);
6718 m_SumFreeSize = size;
6720 VmaSuballocation suballoc = {};
6721 suballoc.offset = 0;
6722 suballoc.size = size;
6723 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6724 suballoc.hAllocation = VK_NULL_HANDLE;
6726 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6727 m_Suballocations.push_back(suballoc);
6728 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6730 m_FreeSuballocationsBySize.push_back(suballocItem);
6733 bool VmaBlockMetadata_Generic::Validate()
const 6735 VMA_VALIDATE(!m_Suballocations.empty());
6738 VkDeviceSize calculatedOffset = 0;
6740 uint32_t calculatedFreeCount = 0;
6742 VkDeviceSize calculatedSumFreeSize = 0;
6745 size_t freeSuballocationsToRegister = 0;
6747 bool prevFree =
false;
6749 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6750 suballocItem != m_Suballocations.cend();
6753 const VmaSuballocation& subAlloc = *suballocItem;
6756 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6758 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
6760 VMA_VALIDATE(!prevFree || !currFree);
6762 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6766 calculatedSumFreeSize += subAlloc.size;
6767 ++calculatedFreeCount;
6768 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6770 ++freeSuballocationsToRegister;
6774 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6778 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6779 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
6782 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6785 calculatedOffset += subAlloc.size;
6786 prevFree = currFree;
6791 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6793 VkDeviceSize lastSize = 0;
6794 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6796 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6799 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6801 VMA_VALIDATE(suballocItem->size >= lastSize);
6803 lastSize = suballocItem->size;
6807 VMA_VALIDATE(ValidateFreeSuballocationList());
6808 VMA_VALIDATE(calculatedOffset == GetSize());
6809 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6810 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
6815 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6817 if(!m_FreeSuballocationsBySize.empty())
6819 return m_FreeSuballocationsBySize.back()->size;
6827 bool VmaBlockMetadata_Generic::IsEmpty()
const 6829 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
6832 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6836 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6848 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6849 suballocItem != m_Suballocations.cend();
6852 const VmaSuballocation& suballoc = *suballocItem;
6853 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
6866 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6868 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6870 inoutStats.
size += GetSize();
6877 #if VMA_STATS_STRING_ENABLED 6879 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6881 PrintDetailedMap_Begin(json,
6883 m_Suballocations.size() - (size_t)m_FreeCount,
6887 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6888 suballocItem != m_Suballocations.cend();
6889 ++suballocItem, ++i)
6891 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6893 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6897 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6901 PrintDetailedMap_End(json);
6904 #endif // #if VMA_STATS_STRING_ENABLED 6906 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6907 uint32_t currentFrameIndex,
6908 uint32_t frameInUseCount,
6909 VkDeviceSize bufferImageGranularity,
6910 VkDeviceSize allocSize,
6911 VkDeviceSize allocAlignment,
6913 VmaSuballocationType allocType,
6914 bool canMakeOtherLost,
6916 VmaAllocationRequest* pAllocationRequest)
6918 VMA_ASSERT(allocSize > 0);
6919 VMA_ASSERT(!upperAddress);
6920 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6921 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6922 VMA_HEAVY_ASSERT(Validate());
6925 if(canMakeOtherLost ==
false &&
6926 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6932 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6933 if(freeSuballocCount > 0)
6938 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6939 m_FreeSuballocationsBySize.data(),
6940 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6941 allocSize + 2 * VMA_DEBUG_MARGIN,
6942 VmaSuballocationItemSizeLess());
6943 size_t index = it - m_FreeSuballocationsBySize.data();
6944 for(; index < freeSuballocCount; ++index)
6949 bufferImageGranularity,
6953 m_FreeSuballocationsBySize[index],
6955 &pAllocationRequest->offset,
6956 &pAllocationRequest->itemsToMakeLostCount,
6957 &pAllocationRequest->sumFreeSize,
6958 &pAllocationRequest->sumItemSize))
6960 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
6968 for(
size_t index = freeSuballocCount; index--; )
6973 bufferImageGranularity,
6977 m_FreeSuballocationsBySize[index],
6979 &pAllocationRequest->offset,
6980 &pAllocationRequest->itemsToMakeLostCount,
6981 &pAllocationRequest->sumFreeSize,
6982 &pAllocationRequest->sumItemSize))
6984 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
6991 if(canMakeOtherLost)
6995 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6996 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6998 VmaAllocationRequest tmpAllocRequest = {};
6999 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7000 suballocIt != m_Suballocations.end();
7003 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7004 suballocIt->hAllocation->CanBecomeLost())
7009 bufferImageGranularity,
7015 &tmpAllocRequest.offset,
7016 &tmpAllocRequest.itemsToMakeLostCount,
7017 &tmpAllocRequest.sumFreeSize,
7018 &tmpAllocRequest.sumItemSize))
7020 tmpAllocRequest.item = suballocIt;
7022 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7025 *pAllocationRequest = tmpAllocRequest;
7031 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Walks forward from the requested suballocation, turning lost-capable
// allocations into free ranges until itemsToMakeLostCount is satisfied.
// Free items are skipped; FreeSuballocation() may merge neighbors, so the
// request's iterator is refreshed from its return value.
7040 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7041 uint32_t currentFrameIndex,
7042 uint32_t frameInUseCount,
7043 VmaAllocationRequest* pAllocationRequest)
7045 while(pAllocationRequest->itemsToMakeLostCount > 0)
7047 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7049 ++pAllocationRequest->item;
7051 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7052 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7053 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7054 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7056 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7057 --pAllocationRequest->itemsToMakeLostCount;
// Post-conditions: the request now points at a free suballocation.
7065 VMA_HEAVY_ASSERT(Validate());
7066 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7067 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can be lost for the given
// frame window, freeing its range; returns how many were lost.
7072 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7074 uint32_t lostAllocationCount = 0;
7075 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7076 it != m_Suballocations.end();
7079 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7080 it->hAllocation->CanBecomeLost() &&
7081 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7083 it = FreeSuballocation(it);
7084 ++lostAllocationCount;
7087 return lostAllocationCount;
// Scans every used suballocation and validates the magic-value margins
// written immediately before and after it in the mapped block memory.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin.
7090 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7092 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7093 it != m_Suballocations.end();
7096 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7098 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7100 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7101 return VK_ERROR_VALIDATION_FAILED_EXT;
7103 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7105 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7106 return VK_ERROR_VALIDATION_FAILED_EXT;
7114 void VmaBlockMetadata_Generic::Alloc(
7115 const VmaAllocationRequest& request,
7116 VmaSuballocationType type,
7117 VkDeviceSize allocSize,
7121 VMA_ASSERT(!upperAddress);
7122 VMA_ASSERT(request.item != m_Suballocations.end());
7123 VmaSuballocation& suballoc = *request.item;
7125 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7127 VMA_ASSERT(request.offset >= suballoc.offset);
7128 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7129 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7130 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7134 UnregisterFreeSuballocation(request.item);
7136 suballoc.offset = request.offset;
7137 suballoc.size = allocSize;
7138 suballoc.type = type;
7139 suballoc.hAllocation = hAllocation;
7144 VmaSuballocation paddingSuballoc = {};
7145 paddingSuballoc.offset = request.offset + allocSize;
7146 paddingSuballoc.size = paddingEnd;
7147 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7148 VmaSuballocationList::iterator next = request.item;
7150 const VmaSuballocationList::iterator paddingEndItem =
7151 m_Suballocations.insert(next, paddingSuballoc);
7152 RegisterFreeSuballocation(paddingEndItem);
7158 VmaSuballocation paddingSuballoc = {};
7159 paddingSuballoc.offset = request.offset - paddingBegin;
7160 paddingSuballoc.size = paddingBegin;
7161 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7162 const VmaSuballocationList::iterator paddingBeginItem =
7163 m_Suballocations.insert(request.item, paddingSuballoc);
7164 RegisterFreeSuballocation(paddingBeginItem);
7168 m_FreeCount = m_FreeCount - 1;
7169 if(paddingBegin > 0)
7177 m_SumFreeSize -= allocSize;
// Linear search for the suballocation owning `allocation`, then frees it
// (FreeSuballocation merges adjacent free ranges). Asserts if not found.
7180 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7182 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7183 suballocItem != m_Suballocations.end();
7186 VmaSuballocation& suballoc = *suballocItem;
7187 if(suballoc.hAllocation == allocation)
7189 FreeSuballocation(suballocItem);
7190 VMA_HEAVY_ASSERT(Validate());
7194 VMA_ASSERT(0 &&
"Not found!");
// Same as Free(), but the suballocation is identified by its byte offset
// inside the block instead of by allocation handle.
7197 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7199 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7200 suballocItem != m_Suballocations.end();
7203 VmaSuballocation& suballoc = *suballocItem;
7204 if(suballoc.offset == offset)
7206 FreeSuballocation(suballocItem);
7210 VMA_ASSERT(0 &&
"Not found!");
// Invariant check for m_FreeSuballocationsBySize: every entry refers to a
// free suballocation, meets the minimum registration size, and the vector is
// sorted by non-decreasing size.
7213 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7215 VkDeviceSize lastSize = 0;
7216 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7218 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7220 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7221 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7222 VMA_VALIDATE(it->size >= lastSize);
7223 lastSize = it->size;
7228 bool VmaBlockMetadata_Generic::CheckAllocation(
7229 uint32_t currentFrameIndex,
7230 uint32_t frameInUseCount,
7231 VkDeviceSize bufferImageGranularity,
7232 VkDeviceSize allocSize,
7233 VkDeviceSize allocAlignment,
7234 VmaSuballocationType allocType,
7235 VmaSuballocationList::const_iterator suballocItem,
7236 bool canMakeOtherLost,
7237 VkDeviceSize* pOffset,
7238 size_t* itemsToMakeLostCount,
7239 VkDeviceSize* pSumFreeSize,
7240 VkDeviceSize* pSumItemSize)
const 7242 VMA_ASSERT(allocSize > 0);
7243 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7244 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7245 VMA_ASSERT(pOffset != VMA_NULL);
7247 *itemsToMakeLostCount = 0;
7251 if(canMakeOtherLost)
7253 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7255 *pSumFreeSize = suballocItem->size;
7259 if(suballocItem->hAllocation->CanBecomeLost() &&
7260 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7262 ++*itemsToMakeLostCount;
7263 *pSumItemSize = suballocItem->size;
7272 if(GetSize() - suballocItem->offset < allocSize)
7278 *pOffset = suballocItem->offset;
7281 if(VMA_DEBUG_MARGIN > 0)
7283 *pOffset += VMA_DEBUG_MARGIN;
7287 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7291 if(bufferImageGranularity > 1)
7293 bool bufferImageGranularityConflict =
false;
7294 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7295 while(prevSuballocItem != m_Suballocations.cbegin())
7298 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7299 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7301 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7303 bufferImageGranularityConflict =
true;
7311 if(bufferImageGranularityConflict)
7313 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7319 if(*pOffset >= suballocItem->offset + suballocItem->size)
7325 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7328 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7330 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7332 if(suballocItem->offset + totalSize > GetSize())
7339 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7340 if(totalSize > suballocItem->size)
7342 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7343 while(remainingSize > 0)
7346 if(lastSuballocItem == m_Suballocations.cend())
7350 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7352 *pSumFreeSize += lastSuballocItem->size;
7356 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7357 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7358 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7360 ++*itemsToMakeLostCount;
7361 *pSumItemSize += lastSuballocItem->size;
7368 remainingSize = (lastSuballocItem->size < remainingSize) ?
7369 remainingSize - lastSuballocItem->size : 0;
7375 if(bufferImageGranularity > 1)
7377 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7379 while(nextSuballocItem != m_Suballocations.cend())
7381 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7382 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7384 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7386 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7387 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7388 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7390 ++*itemsToMakeLostCount;
7409 const VmaSuballocation& suballoc = *suballocItem;
7410 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7412 *pSumFreeSize = suballoc.size;
7415 if(suballoc.size < allocSize)
7421 *pOffset = suballoc.offset;
7424 if(VMA_DEBUG_MARGIN > 0)
7426 *pOffset += VMA_DEBUG_MARGIN;
7430 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7434 if(bufferImageGranularity > 1)
7436 bool bufferImageGranularityConflict =
false;
7437 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7438 while(prevSuballocItem != m_Suballocations.cbegin())
7441 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7442 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7444 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7446 bufferImageGranularityConflict =
true;
7454 if(bufferImageGranularityConflict)
7456 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7461 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7464 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7467 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
7474 if(bufferImageGranularity > 1)
7476 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7478 while(nextSuballocItem != m_Suballocations.cend())
7480 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7481 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7483 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Coalesces a free suballocation with its (also free) successor: the
// successor's size is folded into `item` and its node is erased.
7502 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7504 VMA_ASSERT(item != m_Suballocations.end());
7505 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7507 VmaSuballocationList::iterator nextItem = item;
// NOTE(review): the advance of nextItem past `item` is not visible in this
// extract -- confirm against the upstream file.
7509 VMA_ASSERT(nextItem != m_Suballocations.end());
7510 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7512 item->size += nextItem->size;
7514 m_Suballocations.erase(nextItem);
// Converts an allocated suballocation back into a free one, accounts its
// size into m_SumFreeSize, coalesces with free neighbors, and returns an
// iterator to the resulting (possibly merged) free item, re-registered in
// the by-size lookup vector.
// NOTE(review): the iterator advances for nextItem/prevItem and the return
// path of the merge-with-previous branch are not visible in this extract.
7517 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7520 VmaSuballocation& suballoc = *suballocItem;
7521 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7522 suballoc.hAllocation = VK_NULL_HANDLE;
7526 m_SumFreeSize += suballoc.size;
// Decide whether the previous/next list nodes are free and thus mergeable.
7529 bool mergeWithNext =
false;
7530 bool mergeWithPrev =
false;
7532 VmaSuballocationList::iterator nextItem = suballocItem;
7534 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7536 mergeWithNext =
true;
7539 VmaSuballocationList::iterator prevItem = suballocItem;
7540 if(suballocItem != m_Suballocations.begin())
7543 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7545 mergeWithPrev =
true;
// Neighbors must leave the by-size vector before their node is merged away.
7551 UnregisterFreeSuballocation(nextItem);
7552 MergeFreeWithNext(suballocItem);
7557 UnregisterFreeSuballocation(prevItem);
7558 MergeFreeWithNext(prevItem);
7559 RegisterFreeSuballocation(prevItem);
7564 RegisterFreeSuballocation(suballocItem);
7565 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize (kept sorted
// by size) once it meets the minimum registration threshold; smaller free
// ranges are intentionally not tracked there.
7569 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7571 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7572 VMA_ASSERT(item->size > 0);
7576 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7578 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7580 if(m_FreeSuballocationsBySize.empty())
7582 m_FreeSuballocationsBySize.push_back(item);
7586 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the by-size vector: binary search finds
// the first entry of equal size, then a short linear scan over the run of
// equal-size entries locates the exact iterator. Asserts if absent.
7594 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7596 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7597 VMA_ASSERT(item->size > 0);
7601 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7603 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7605 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7606 m_FreeSuballocationsBySize.data(),
7607 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7609 VmaSuballocationItemSizeLess());
7610 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7611 index < m_FreeSuballocationsBySize.size();
7614 if(m_FreeSuballocationsBySize[index] == item)
7616 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Once the size run ends without a match, the item was never registered.
7619 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7621 VMA_ASSERT(0 &&
"Not found.");
// Linear metadata keeps two suballocation vectors whose roles alternate
// (m_1stVectorIndex selects which is "1st"); starts with the 2nd vector
// unused (SECOND_VECTOR_EMPTY) and all null-item counters at zero.
7630 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7631 VmaBlockMetadata(hAllocator),
7633 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7634 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7635 m_1stVectorIndex(0),
7636 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7637 m_1stNullItemsBeginCount(0),
7638 m_1stNullItemsMiddleCount(0),
7639 m_2ndNullItemsCount(0)
7643 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes the metadata for a block of `size` bytes; all of it is free.
7647 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7649 VmaBlockMetadata::Init(size);
7650 m_SumFreeSize = size;
// Consistency check of the linear metadata. Verifies vector/mode invariants,
// then walks the 2nd vector (ring-buffer order), the 1st vector, and the 2nd
// vector again (double-stack order, back to front), checking that offsets are
// monotonic, each live suballocation matches its VmaAllocation handle, and the
// null-item counters and m_SumFreeSize agree with what is actually stored.
// Returns false (via VMA_VALIDATE) on the first violation.
7653 bool VmaBlockMetadata_Linear::Validate()
const 7655 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7656 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a 2nd-vector mode is active.
7658 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7659 VMA_VALIDATE(!suballocations1st.empty() ||
7660 suballocations2nd.empty() ||
7661 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7663 if(!suballocations1st.empty())
// First non-null item of 1st vector must be live; last item must be live.
7666 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7668 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7670 if(!suballocations2nd.empty())
7673 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
7676 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7677 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7679 VkDeviceSize sumUsedSize = 0;
7680 const size_t suballoc1stCount = suballocations1st.size();
7681 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Pass 1: 2nd vector in ring-buffer mode (lowest addresses).
7683 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7685 const size_t suballoc2ndCount = suballocations2nd.size();
7686 size_t nullItem2ndCount = 0;
7687 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7689 const VmaSuballocation& suballoc = suballocations2nd[i];
7690 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// FREE type and null handle must coincide; offsets strictly advance.
7692 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7693 VMA_VALIDATE(suballoc.offset >= offset);
7697 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7698 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7699 sumUsedSize += suballoc.size;
7706 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7709 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// The leading m_1stNullItemsBeginCount entries of the 1st vector must all be null.
7712 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7714 const VmaSuballocation& suballoc = suballocations1st[i];
7715 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7716 suballoc.hAllocation == VK_NULL_HANDLE);
7719 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Pass 2: remainder of the 1st vector.
7721 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7723 const VmaSuballocation& suballoc = suballocations1st[i];
7724 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7726 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7727 VMA_VALIDATE(suballoc.offset >= offset);
7728 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7732 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7733 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7734 sumUsedSize += suballoc.size;
7741 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7743 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Pass 3: 2nd vector in double-stack mode (highest addresses, iterated back to front).
7745 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7747 const size_t suballoc2ndCount = suballocations2nd.size();
7748 size_t nullItem2ndCount = 0;
7749 for(
size_t i = suballoc2ndCount; i--; )
7751 const VmaSuballocation& suballoc = suballocations2nd[i];
7752 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7754 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7755 VMA_VALIDATE(suballoc.offset >= offset);
7759 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7760 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7761 sumUsedSize += suballoc.size;
7768 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7771 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final cross-checks against the cached aggregate values.
7774 VMA_VALIDATE(offset <= GetSize());
7775 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7780 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7782 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7783 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range in the block, computed
// per 2nd-vector mode: with no 2nd vector it is the larger of the gap before
// the first live 1st-vector item and the gap after the last; in ring-buffer
// mode it is the gap between the end of the 2nd vector and the start of the
// 1st; in double-stack mode it is the gap between the end of the 1st vector
// and the top of the 2nd.
7786 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7788 const VkDeviceSize size = GetSize();
7800 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7802 switch(m_2ndVectorMode)
7804 case SECOND_VECTOR_EMPTY:
// Free space is before the first and after the last 1st-vector item.
7810 const size_t suballocations1stCount = suballocations1st.size();
7811 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7812 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7813 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7815 firstSuballoc.offset,
7816 size - (lastSuballoc.offset + lastSuballoc.size));
7820 case SECOND_VECTOR_RING_BUFFER:
// Free space lies between the 2nd vector's end and the 1st vector's start.
7825 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7826 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7827 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7828 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
7832 case SECOND_VECTOR_DOUBLE_STACK:
// Free space lies between the 1st vector's end and the 2nd stack's top.
7837 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7838 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7839 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7840 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with detailed statistics of this block by walking all
// suballocations in address order: first the 2nd vector when in ring-buffer
// mode (lowest addresses), then the 1st vector, then the 2nd vector back to
// front when in double-stack mode (highest addresses). Lost (null-handle)
// items are skipped; gaps between live items are counted as unused ranges.
7850 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7852 const VkDeviceSize size = GetSize();
7853 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7854 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7855 const size_t suballoc1stCount = suballocations1st.size();
7856 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the previously visited live suballocation.
7867 VkDeviceSize lastOffset = 0;
7869 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// 2nd-vector items occupy [0, start of first live 1st-vector item).
7871 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7872 size_t nextAlloc2ndIndex = 0;
7873 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip over lost allocations (null handles).
7876 while(nextAlloc2ndIndex < suballoc2ndCount &&
7877 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7879 ++nextAlloc2ndIndex;
7883 if(nextAlloc2ndIndex < suballoc2ndCount)
7885 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
7888 if(lastOffset < suballoc.offset)
7891 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7905 lastOffset = suballoc.offset + suballoc.size;
7906 ++nextAlloc2ndIndex;
// No more 2nd-vector items: the remainder up to the boundary is unused.
7912 if(lastOffset < freeSpace2ndTo1stEnd)
7914 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7922 lastOffset = freeSpace2ndTo1stEnd;
// 1st-vector items run up to the block end, or to the 2nd stack's top.
7927 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7928 const VkDeviceSize freeSpace1stTo2ndEnd =
7929 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7930 while(lastOffset < freeSpace1stTo2ndEnd)
7933 while(nextAlloc1stIndex < suballoc1stCount &&
7934 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7936 ++nextAlloc1stIndex;
7940 if(nextAlloc1stIndex < suballoc1stCount)
7942 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7945 if(lastOffset < suballoc.offset)
7948 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7962 lastOffset = suballoc.offset + suballoc.size;
7963 ++nextAlloc1stIndex;
7969 if(lastOffset < freeSpace1stTo2ndEnd)
7971 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7979 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack: 2nd vector holds the highest addresses; iterate back to front
// so addresses are still visited in increasing order.
7983 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7985 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7986 while(lastOffset < size)
7989 while(nextAlloc2ndIndex != SIZE_MAX &&
7990 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7992 --nextAlloc2ndIndex;
7996 if(nextAlloc2ndIndex != SIZE_MAX)
7998 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8001 if(lastOffset < suballoc.offset)
8004 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8018 lastOffset = suballoc.offset + suballoc.size;
8019 --nextAlloc2ndIndex;
// Tail of the block after the last live item is unused.
8025 if(lastOffset < size)
8027 const VkDeviceSize unusedRangeSize = size - lastOffset;
8043 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8045 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8046 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8047 const VkDeviceSize size = GetSize();
8048 const size_t suballoc1stCount = suballocations1st.size();
8049 const size_t suballoc2ndCount = suballocations2nd.size();
8051 inoutStats.
size += size;
8053 VkDeviceSize lastOffset = 0;
8055 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8057 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8058 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8059 while(lastOffset < freeSpace2ndTo1stEnd)
8062 while(nextAlloc2ndIndex < suballoc2ndCount &&
8063 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8065 ++nextAlloc2ndIndex;
8069 if(nextAlloc2ndIndex < suballoc2ndCount)
8071 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8074 if(lastOffset < suballoc.offset)
8077 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8088 lastOffset = suballoc.offset + suballoc.size;
8089 ++nextAlloc2ndIndex;
8094 if(lastOffset < freeSpace2ndTo1stEnd)
8097 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8104 lastOffset = freeSpace2ndTo1stEnd;
8109 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8110 const VkDeviceSize freeSpace1stTo2ndEnd =
8111 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8112 while(lastOffset < freeSpace1stTo2ndEnd)
8115 while(nextAlloc1stIndex < suballoc1stCount &&
8116 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8118 ++nextAlloc1stIndex;
8122 if(nextAlloc1stIndex < suballoc1stCount)
8124 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8127 if(lastOffset < suballoc.offset)
8130 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8141 lastOffset = suballoc.offset + suballoc.size;
8142 ++nextAlloc1stIndex;
8147 if(lastOffset < freeSpace1stTo2ndEnd)
8150 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8157 lastOffset = freeSpace1stTo2ndEnd;
8161 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8163 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8164 while(lastOffset < size)
8167 while(nextAlloc2ndIndex != SIZE_MAX &&
8168 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8170 --nextAlloc2ndIndex;
8174 if(nextAlloc2ndIndex != SIZE_MAX)
8176 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8179 if(lastOffset < suballoc.offset)
8182 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8193 lastOffset = suballoc.offset + suballoc.size;
8194 --nextAlloc2ndIndex;
8199 if(lastOffset < size)
8202 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this block. Two passes over the same
// address-ordered walk (2nd vector in ring-buffer mode, then 1st vector, then
// 2nd vector back-to-front in double-stack mode): the first pass only counts
// allocations/unused ranges and sums used bytes for the header, the second
// pass emits each allocation and unused range via the PrintDetailedMap_*
// helpers.
8215 #if VMA_STATS_STRING_ENABLED 8216 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8218 const VkDeviceSize size = GetSize();
8219 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8220 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8221 const size_t suballoc1stCount = suballocations1st.size();
8222 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count items and bytes only.
8226 size_t unusedRangeCount = 0;
8227 VkDeviceSize usedBytes = 0;
8229 VkDeviceSize lastOffset = 0;
8231 size_t alloc2ndCount = 0;
8232 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8234 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8235 size_t nextAlloc2ndIndex = 0;
8236 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip lost allocations (null handles).
8239 while(nextAlloc2ndIndex < suballoc2ndCount &&
8240 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8242 ++nextAlloc2ndIndex;
8246 if(nextAlloc2ndIndex < suballoc2ndCount)
8248 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8251 if(lastOffset < suballoc.offset)
8260 usedBytes += suballoc.size;
8263 lastOffset = suballoc.offset + suballoc.size;
8264 ++nextAlloc2ndIndex;
8269 if(lastOffset < freeSpace2ndTo1stEnd)
8276 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector, up to the block end or the 2nd stack's top.
8281 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8282 size_t alloc1stCount = 0;
8283 const VkDeviceSize freeSpace1stTo2ndEnd =
8284 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8285 while(lastOffset < freeSpace1stTo2ndEnd)
8288 while(nextAlloc1stIndex < suballoc1stCount &&
8289 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8291 ++nextAlloc1stIndex;
8295 if(nextAlloc1stIndex < suballoc1stCount)
8297 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8300 if(lastOffset < suballoc.offset)
8309 usedBytes += suballoc.size;
8312 lastOffset = suballoc.offset + suballoc.size;
8313 ++nextAlloc1stIndex;
8318 if(lastOffset < size)
8325 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack 2nd vector, back to front (increasing addresses).
8329 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8331 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8332 while(lastOffset < size)
8335 while(nextAlloc2ndIndex != SIZE_MAX &&
8336 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8338 --nextAlloc2ndIndex;
8342 if(nextAlloc2ndIndex != SIZE_MAX)
8344 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8347 if(lastOffset < suballoc.offset)
8356 usedBytes += suballoc.size;
8359 lastOffset = suballoc.offset + suballoc.size;
8360 --nextAlloc2ndIndex;
8365 if(lastOffset < size)
// Emit the JSON header with the totals computed above.
8377 const VkDeviceSize unusedBytes = size - usedBytes;
8378 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: same walk again, this time emitting JSON entries.
8383 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8385 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8386 size_t nextAlloc2ndIndex = 0;
8387 while(lastOffset < freeSpace2ndTo1stEnd)
8390 while(nextAlloc2ndIndex < suballoc2ndCount &&
8391 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8393 ++nextAlloc2ndIndex;
8397 if(nextAlloc2ndIndex < suballoc2ndCount)
8399 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8402 if(lastOffset < suballoc.offset)
8405 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8406 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8411 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8414 lastOffset = suballoc.offset + suballoc.size;
8415 ++nextAlloc2ndIndex;
8420 if(lastOffset < freeSpace2ndTo1stEnd)
8423 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8424 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8428 lastOffset = freeSpace2ndTo1stEnd;
8433 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8434 while(lastOffset < freeSpace1stTo2ndEnd)
8437 while(nextAlloc1stIndex < suballoc1stCount &&
8438 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8440 ++nextAlloc1stIndex;
8444 if(nextAlloc1stIndex < suballoc1stCount)
8446 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8449 if(lastOffset < suballoc.offset)
8452 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8453 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8458 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8461 lastOffset = suballoc.offset + suballoc.size;
8462 ++nextAlloc1stIndex;
8467 if(lastOffset < freeSpace1stTo2ndEnd)
8470 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8471 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8475 lastOffset = freeSpace1stTo2ndEnd;
8479 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8481 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8482 while(lastOffset < size)
8485 while(nextAlloc2ndIndex != SIZE_MAX &&
8486 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8488 --nextAlloc2ndIndex;
8492 if(nextAlloc2ndIndex != SIZE_MAX)
8494 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8497 if(lastOffset < suballoc.offset)
8500 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8501 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8506 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8509 lastOffset = suballoc.offset + suballoc.size;
8510 --nextAlloc2ndIndex;
8515 if(lastOffset < size)
8518 const VkDeviceSize unusedRangeSize = size - lastOffset;
8519 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8528 PrintDetailedMap_End(json);
// Tries to find a place for a new allocation of allocSize/allocAlignment in
// this linear block, honoring VMA_DEBUG_MARGIN and bufferImageGranularity.
// Three placement strategies depending on flags and current mode:
//   1) upper address (double-stack, grows the 2nd vector downwards),
//   2) end of the 1st vector (normal linear growth),
//   3) start of free space before the 1st vector (ring-buffer, 2nd vector),
// optionally making lost allocations available (canMakeOtherLost).
// On success fills *pAllocationRequest and returns true.
8530 #endif // #if VMA_STATS_STRING_ENABLED 8532 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8533 uint32_t currentFrameIndex,
8534 uint32_t frameInUseCount,
8535 VkDeviceSize bufferImageGranularity,
8536 VkDeviceSize allocSize,
8537 VkDeviceSize allocAlignment,
8539 VmaSuballocationType allocType,
8540 bool canMakeOtherLost,
8542 VmaAllocationRequest* pAllocationRequest)
8544 VMA_ASSERT(allocSize > 0);
8545 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8546 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8547 VMA_HEAVY_ASSERT(Validate());
8549 const VkDeviceSize size = GetSize();
8550 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8551 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// STRATEGY 1: allocate from the upper end (double-stack).
8555 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8557 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8562 if(allocSize > size)
// Start from the block end, or below the lowest 2nd-vector item.
8566 VkDeviceSize resultBaseOffset = size - allocSize;
8567 if(!suballocations2nd.empty())
8569 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8570 resultBaseOffset = lastSuballoc.offset - allocSize;
8571 if(allocSize > lastSuballoc.offset)
8578 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin below the allocation.
8581 if(VMA_DEBUG_MARGIN > 0)
8583 if(resultOffset < VMA_DEBUG_MARGIN)
8587 resultOffset -= VMA_DEBUG_MARGIN;
// Align downwards (stack grows toward lower addresses).
8591 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check granularity conflicts with 2nd-vector neighbors above.
8595 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8597 bool bufferImageGranularityConflict =
false;
8598 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8600 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8601 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8603 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8605 bufferImageGranularityConflict =
true;
8613 if(bufferImageGranularityConflict)
8615 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must be room between the end of the 1st vector and resultOffset.
8620 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8621 suballocations1st.back().offset + suballocations1st.back().size :
8623 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Check granularity conflicts with 1st-vector neighbors below.
8627 if(bufferImageGranularity > 1)
8629 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8631 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8632 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8634 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request (nothing needs to be made lost).
8648 pAllocationRequest->offset = resultOffset;
8649 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8650 pAllocationRequest->sumItemSize = 0;
8652 pAllocationRequest->itemsToMakeLostCount = 0;
// STRATEGY 2: append after the last 1st-vector item.
8658 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8662 VkDeviceSize resultBaseOffset = 0;
8663 if(!suballocations1st.empty())
8665 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8666 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8670 VkDeviceSize resultOffset = resultBaseOffset;
8673 if(VMA_DEBUG_MARGIN > 0)
8675 resultOffset += VMA_DEBUG_MARGIN;
8679 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Check granularity conflicts with preceding 1st-vector items.
8683 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8685 bool bufferImageGranularityConflict =
false;
8686 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8688 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8689 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8691 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8693 bufferImageGranularityConflict =
true;
8701 if(bufferImageGranularityConflict)
8703 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at block end, or at the top of the 2nd stack.
8707 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8708 suballocations2nd.back().offset : size;
8711 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts with 2nd-stack items above.
8715 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8717 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8719 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8720 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8722 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8736 pAllocationRequest->offset = resultOffset;
8737 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8738 pAllocationRequest->sumItemSize = 0;
8740 pAllocationRequest->itemsToMakeLostCount = 0;
// STRATEGY 3: wrap around — place after the 2nd vector, before the 1st
// vector's first live item (ring-buffer mode), possibly making lost
// allocations in the way available.
8747 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8749 VMA_ASSERT(!suballocations1st.empty());
8751 VkDeviceSize resultBaseOffset = 0;
8752 if(!suballocations2nd.empty())
8754 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8755 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8759 VkDeviceSize resultOffset = resultBaseOffset;
8762 if(VMA_DEBUG_MARGIN > 0)
8764 resultOffset += VMA_DEBUG_MARGIN;
8768 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8772 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8774 bool bufferImageGranularityConflict =
false;
8775 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8777 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8778 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8780 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8782 bufferImageGranularityConflict =
true;
8790 if(bufferImageGranularityConflict)
8792 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8796 pAllocationRequest->itemsToMakeLostCount = 0;
8797 pAllocationRequest->sumItemSize = 0;
8798 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations overlapping the candidate range that could be
// made lost; give up if any of them cannot become lost.
8800 if(canMakeOtherLost)
8802 while(index1st < suballocations1st.size() &&
8803 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8806 const VmaSuballocation& suballoc = suballocations1st[index1st];
8807 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8813 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8814 if(suballoc.hAllocation->CanBecomeLost() &&
8815 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8817 ++pAllocationRequest->itemsToMakeLostCount;
8818 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any following items that would conflict on granularity.
8830 if(bufferImageGranularity > 1)
8832 while(index1st < suballocations1st.size())
8834 const VmaSuballocation& suballoc = suballocations1st[index1st];
8835 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8837 if(suballoc.hAllocation != VK_NULL_HANDLE)
8840 if(suballoc.hAllocation->CanBecomeLost() &&
8841 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8843 ++pAllocationRequest->itemsToMakeLostCount;
8844 pAllocationRequest->sumItemSize += suballoc.size;
// The candidate range must fit before the next surviving 1st-vector item
// (or before the block end if nothing remains).
8863 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8864 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity check against items that stay after resultOffset.
8868 if(bufferImageGranularity > 1)
8870 for(
size_t nextSuballocIndex = index1st;
8871 nextSuballocIndex < suballocations1st.size();
8872 nextSuballocIndex++)
8874 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8875 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8877 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8891 pAllocationRequest->offset = resultOffset;
8892 pAllocationRequest->sumFreeSize =
8893 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8895 - pAllocationRequest->sumItemSize;
// Makes lost the allocations that CreateAllocationRequest() counted in
// pAllocationRequest->itemsToMakeLostCount, scanning the 1st vector from its
// first live item. Each successfully lost item is turned into a FREE null
// entry, added to m_SumFreeSize and to the middle-null counter.
8905 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8906 uint32_t currentFrameIndex,
8907 uint32_t frameInUseCount,
8908 VmaAllocationRequest* pAllocationRequest)
// Nothing to do if the request needs no items made lost.
8910 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Only the ring-buffer/empty modes can have requests with lost items.
8915 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8917 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8918 size_t index1st = m_1stNullItemsBeginCount;
8919 size_t madeLostCount = 0;
8920 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8922 VMA_ASSERT(index1st < suballocations1st.size());
8923 VmaSuballocation& suballoc = suballocations1st[index1st];
8924 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8926 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8927 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8928 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost item into a free null entry and update counters.
8930 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8931 suballoc.hAllocation = VK_NULL_HANDLE;
8932 m_SumFreeSize += suballoc.size;
8933 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in this block that can become lost given the
// current frame index and frame-in-use count. Scans both vectors, converts
// each lost item to a FREE null entry, updates the null counters and
// m_SumFreeSize, and returns how many allocations were lost.
8950 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8952 uint32_t lostAllocationCount = 0;
8954 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// 1st vector: start past the leading run of already-null items.
8955 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8957 VmaSuballocation& suballoc = suballocations1st[i];
8958 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8959 suballoc.hAllocation->CanBecomeLost() &&
8960 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8962 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8963 suballoc.hAllocation = VK_NULL_HANDLE;
8964 ++m_1stNullItemsMiddleCount;
8965 m_SumFreeSize += suballoc.size;
8966 ++lostAllocationCount;
8970 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector: scan everything.
8971 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8973 VmaSuballocation& suballoc = suballocations2nd[i];
8974 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8975 suballoc.hAllocation->CanBecomeLost() &&
8976 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8978 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8979 suballoc.hAllocation = VK_NULL_HANDLE;
8980 ++m_2ndNullItemsCount;
8981 ++lostAllocationCount;
// If anything was lost, vector cleanup/compaction runs before returning.
8985 if(lostAllocationCount)
8990 return lostAllocationCount;
// Validates the magic-value margins written before and after every live
// suballocation in both vectors (pBlockData is the mapped block memory).
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin.
8993 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8995 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8996 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8998 const VmaSuballocation& suballoc = suballocations1st[i];
8999 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin immediately before the allocation.
9001 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9003 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9004 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin immediately after the allocation.
9006 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9008 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9009 return VK_ERROR_VALIDATION_FAILED_EXT;
9014 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9015 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9017 const VmaSuballocation& suballoc = suballocations2nd[i];
9018 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9020 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9022 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9023 return VK_ERROR_VALIDATION_FAILED_EXT;
9025 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9027 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9028 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: inserts the new
// suballocation either at the upper end (2nd vector as double stack), at the
// end of the 1st vector, or wrapped around at the start of free space (2nd
// vector as ring buffer), then subtracts its size from m_SumFreeSize.
9036 void VmaBlockMetadata_Linear::Alloc(
9037 const VmaAllocationRequest& request,
9038 VmaSuballocationType type,
9039 VkDeviceSize allocSize,
9043 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address placement: push onto the 2nd vector, switching it to
// double-stack mode (ring-buffer mode excludes this path).
9047 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9048 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9049 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9050 suballocations2nd.push_back(newSuballoc);
9051 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9055 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block goes to the 1st vector.
9058 if(suballocations1st.empty())
9060 suballocations1st.push_back(newSuballoc);
// Request lies past the 1st vector's end: normal linear growth.
9065 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9068 VMA_ASSERT(request.offset + allocSize <= GetSize());
9069 suballocations1st.push_back(newSuballoc);
// Request fits before the 1st vector's first live item: wrap around into the
// 2nd vector (ring-buffer mode).
9072 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9074 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9076 switch(m_2ndVectorMode)
9078 case SECOND_VECTOR_EMPTY:
// First wrapped item switches the 2nd vector to ring-buffer mode.
9080 VMA_ASSERT(suballocations2nd.empty());
9081 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9083 case SECOND_VECTOR_RING_BUFFER:
9085 VMA_ASSERT(!suballocations2nd.empty());
9087 case SECOND_VECTOR_DOUBLE_STACK:
9088 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9094 suballocations2nd.push_back(newSuballoc);
// Request matched none of the valid placements: internal logic error.
9098 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9103 m_SumFreeSize -= newSuballoc.size;
9106 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9108 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation located at `offset`. Fast paths handle the most
// common cases for a linear allocator — the oldest item (front of the 1st
// vector) and the newest item (back of the 2nd vector, or back of the 1st
// vector when the 2nd is unused). Otherwise a sorted binary search locates the
// item in either vector and marks it as a null FREE entry.
9111 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9113 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9114 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9116 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector just extends the
// leading null run.
9119 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9120 if(firstSuballoc.offset == offset)
9122 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9123 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9124 m_SumFreeSize += firstSuballoc.size;
9125 ++m_1stNullItemsBeginCount;
// Fast path: freeing the most recent item — back of the 2nd vector when it is
// active...
9132 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9133 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9135 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9136 if(lastSuballoc.offset == offset)
9138 m_SumFreeSize += lastSuballoc.size;
9139 suballocations2nd.pop_back();
// ...or back of the 1st vector when the 2nd is unused.
9145 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9147 VmaSuballocation& lastSuballoc = suballocations1st.back();
9148 if(lastSuballoc.offset == offset)
9150 m_SumFreeSize += lastSuballoc.size;
9151 suballocations1st.pop_back();
// Slow path: binary search in the 1st vector (sorted by increasing offset).
9159 VmaSuballocation refSuballoc;
9160 refSuballoc.offset = offset;
9162 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9163 suballocations1st.begin() + m_1stNullItemsBeginCount,
9164 suballocations1st.end(),
9166 if(it != suballocations1st.end())
9168 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9169 it->hAllocation = VK_NULL_HANDLE;
9170 ++m_1stNullItemsMiddleCount;
9171 m_SumFreeSize += it->size;
// Slow path: binary search in the 2nd vector; ring-buffer mode is sorted
// ascending by offset, double-stack mode descending.
9177 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9180 VmaSuballocation refSuballoc;
9181 refSuballoc.offset = offset;
9183 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9184 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9185 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9186 if(it != suballocations2nd.end())
9188 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9189 it->hAllocation = VK_NULL_HANDLE;
9190 ++m_2ndNullItemsCount;
9191 m_SumFreeSize += it->size;
9197 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9200 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9202 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9203 const size_t suballocCount = AccessSuballocations1st().size();
9204 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after an allocation was freed: trims null items from both
// suballocation vectors, optionally compacts the 1st vector, and swaps the
// two vectors when the 1st becomes empty while the 2nd (ring-buffer) is not.
// NOTE(review): this chunk lost lines (braces, the IsEmpty() branch header,
// srcIndex/dstIndex increments) during extraction; tokens below are kept
// verbatim.
9207 void VmaBlockMetadata_Linear::CleanupAfterFree()
9209 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9210 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path (presumably guarded by IsEmpty() — header line missing): reset
// both vectors and all null-item counters.
9214 suballocations1st.clear();
9215 suballocations2nd.clear();
9216 m_1stNullItemsBeginCount = 0;
9217 m_1stNullItemsMiddleCount = 0;
9218 m_2ndNullItemsCount = 0;
9219 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9223 const size_t suballoc1stCount = suballocations1st.size();
9224 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9225 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Convert "middle" null items at the front of the 1st vector into "begin"
// null items so they can be skipped wholesale.
9228 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9229 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9231 ++m_1stNullItemsBeginCount;
9232 --m_1stNullItemsMiddleCount;
// Pop trailing null items from the end of the 1st vector.
9236 while(m_1stNullItemsMiddleCount > 0 &&
9237 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9239 --m_1stNullItemsMiddleCount;
9240 suballocations1st.pop_back();
// Pop trailing null items from the end of the 2nd vector.
9244 while(m_2ndNullItemsCount > 0 &&
9245 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9247 --m_2ndNullItemsCount;
9248 suballocations2nd.pop_back();
// Compact the 1st vector in place: move live items down over null ones.
9251 if(ShouldCompact1st())
9253 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9254 size_t srcIndex = m_1stNullItemsBeginCount;
9255 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9257 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9261 if(dstIndex != srcIndex)
9263 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9267 suballocations1st.resize(nonNullItemCount);
9268 m_1stNullItemsBeginCount = 0;
9269 m_1stNullItemsMiddleCount = 0;
// 2nd vector fully drained: mark it empty.
9273 if(suballocations2nd.empty())
9275 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left.
9279 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9281 suballocations1st.clear();
9282 m_1stNullItemsBeginCount = 0;
// Ring-buffer mode: the 2nd vector becomes the new 1st vector (index swap),
// carrying its null-item accounting over.
9284 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9287 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9288 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9289 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9290 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9292 ++m_1stNullItemsBeginCount;
9293 --m_1stNullItemsMiddleCount;
9295 m_2ndNullItemsCount = 0;
9296 m_1stVectorIndex ^= 1;
9301 VMA_HEAVY_ASSERT(Validate());
// Constructor: forwards to the base metadata class and zeroes the per-level
// free lists. NOTE(review): some member initializers (e.g. m_Root,
// m_LevelCount) were dropped by the extraction.
9308 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9309 VmaBlockMetadata(hAllocator),
9311 m_AllocationCount(0),
9315 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor — body (recursive node deletion) not visible here.
9318 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
// Initializes the buddy metadata: usable size is the size rounded DOWN to a
// power of two; a single free root node covers the whole usable range.
9323 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9325 VmaBlockMetadata::Init(size);
9327 m_UsableSize = VmaPrevPow2(size);
9328 m_SumFreeSize = m_UsableSize;
// Count usable levels until node size falls below MIN_NODE_SIZE
// (level increment line missing from extraction).
9332 while(m_LevelCount < MAX_LEVELS &&
9333 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
9338 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9339 rootNode->offset = 0;
9340 rootNode->type = Node::TYPE_FREE;
9341 rootNode->parent = VMA_NULL;
9342 rootNode->buddy = VMA_NULL;
9345 AddToFreeListFront(0, rootNode);
// Validates the whole buddy tree and the per-level free lists against the
// cached counters (allocation count, sum of free sizes).
9348 bool VmaBlockMetadata_Buddy::Validate()
const 9351 ValidationContext ctx;
9352 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9354 VMA_VALIDATE(
false &&
"ValidateNode failed.");
9356 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9357 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Each active level's free list must be a well-formed doubly-linked list of
// TYPE_FREE nodes with consistent front/back pointers.
9360 for(uint32_t level = 0; level < m_LevelCount; ++level)
9362 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9363 m_FreeList[level].front->free.prev == VMA_NULL);
9365 for(Node* node = m_FreeList[level].front;
9367 node = node->free.next)
9369 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9371 if(node->free.next == VMA_NULL)
9373 VMA_VALIDATE(m_FreeList[level].back == node);
9377 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must stay empty.
9383 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9385 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
9391 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9393 for(uint32_t level = 0; level < m_LevelCount; ++level)
9395 if(m_FreeList[level].front != VMA_NULL)
9397 return LevelToNodeSize(level);
// Aggregates per-block statistics by walking the buddy tree; the tail of the
// block beyond the usable power-of-two size is reported as an unused range.
// NOTE(review): the lines that zero/update outInfo fields were dropped by the
// extraction.
9403 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9405 const VkDeviceSize unusableSize = GetUnusableSize();
9416 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9418 if(unusableSize > 0)
// Adds this block's totals into pool-wide statistics. The unusable tail
// counts as unused space.
9427 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9429 const VkDeviceSize unusableSize = GetUnusableSize();
9431 inoutStats.
size += GetSize();
9432 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9437 if(unusableSize > 0)
// Emits a JSON map of this block: summary stats first, then every node of
// the tree, then the unusable tail as one unused range.
9444 #if VMA_STATS_STRING_ENABLED 9446 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9450 CalcAllocationStatInfo(stat);
9452 PrintDetailedMap_Begin(
9458 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9460 const VkDeviceSize unusableSize = GetUnusableSize();
9461 if(unusableSize > 0)
9463 PrintDetailedMap_UnusedRange(json,
9468 PrintDetailedMap_End(json);
// Tries to find a free buddy node that can hold the requested allocation.
// Searches from the target level upward (larger nodes); the chosen level is
// smuggled to Alloc() through customData. NOTE(review): the `upperAddress`
// parameter line and the return statements were dropped by the extraction.
9471 #endif // #if VMA_STATS_STRING_ENABLED 9473 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9474 uint32_t currentFrameIndex,
9475 uint32_t frameInUseCount,
9476 VkDeviceSize bufferImageGranularity,
9477 VkDeviceSize allocSize,
9478 VkDeviceSize allocAlignment,
9480 VmaSuballocationType allocType,
9481 bool canMakeOtherLost,
9483 VmaAllocationRequest* pAllocationRequest)
9485 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservative handling of buffer-image granularity: when the resource kind
// is unknown or optimal-tiled, inflate both alignment and size to the
// granularity so neighbors can never conflict.
9489 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9490 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9491 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9493 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9494 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9497 if(allocSize > m_UsableSize)
// Scan levels from targetLevel down to 0 (i.e. increasingly large nodes).
9502 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9503 for(uint32_t level = targetLevel + 1; level--; )
9505 for(Node* freeNode = m_FreeList[level].front;
9506 freeNode != VMA_NULL;
9507 freeNode = freeNode->free.next)
// Buddy node offsets are naturally power-of-two aligned; accept the first
// node whose offset satisfies the requested alignment.
9509 if(freeNode->offset % allocAlignment == 0)
9511 pAllocationRequest->offset = freeNode->offset;
9512 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9513 pAllocationRequest->sumItemSize = 0;
9514 pAllocationRequest->itemsToMakeLostCount = 0;
9515 pAllocationRequest->customData = (
void*)(uintptr_t)level;
9524 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9525 uint32_t currentFrameIndex,
9526 uint32_t frameInUseCount,
9527 VmaAllocationRequest* pAllocationRequest)
9533 return pAllocationRequest->itemsToMakeLostCount == 0;
// Buddy algorithm cannot make allocations lost — body (returning 0) was
// dropped by the extraction.
9536 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation request produced by CreateAllocationRequest: finds
// the chosen free node, splits it repeatedly until it reaches the target
// level, then marks it as allocated.
9545 void VmaBlockMetadata_Buddy::Alloc(
9546 const VmaAllocationRequest& request,
9547 VmaSuballocationType type,
9548 VkDeviceSize allocSize,
9552 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Level chosen by CreateAllocationRequest was stored in customData.
9553 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the free node at that level whose offset matches the request.
9555 Node* currNode = m_FreeList[currLevel].front;
9556 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9557 while(currNode->offset != request.offset)
9559 currNode = currNode->free.next;
9560 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node until it is exactly the target size (currLevel increment
// line lost in extraction).
9564 while(currLevel < targetLevel)
9568 RemoveFromFreeList(currLevel, currNode);
9570 const uint32_t childrenLevel = currLevel + 1;
// Create two child nodes (buddies) covering the two halves of currNode.
9573 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9574 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9576 leftChild->offset = currNode->offset;
9577 leftChild->type = Node::TYPE_FREE;
9578 leftChild->parent = currNode;
9579 leftChild->buddy = rightChild;
9581 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9582 rightChild->type = Node::TYPE_FREE;
9583 rightChild->parent = currNode;
9584 rightChild->buddy = leftChild;
// Parent becomes a split node; only the left child is stored, the right is
// reachable as its buddy.
9587 currNode->type = Node::TYPE_SPLIT;
9588 currNode->split.leftChild = leftChild;
// Left child pushed last so it is picked first (lower offsets preferred).
9591 AddToFreeListFront(childrenLevel, rightChild);
9592 AddToFreeListFront(childrenLevel, leftChild);
9597 currNode = m_FreeList[currLevel].front;
9606 VMA_ASSERT(currLevel == targetLevel &&
9607 currNode != VMA_NULL &&
9608 currNode->type == Node::TYPE_FREE);
9609 RemoveFromFreeList(currLevel, currNode);
// Convert the free node into an allocation node and update counters.
9612 currNode->type = Node::TYPE_ALLOCATION;
9613 currNode->allocation.alloc = hAllocation;
9615 ++m_AllocationCount;
9617 m_SumFreeSize -= allocSize;
9620 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9622 if(node->type == Node::TYPE_SPLIT)
9624 DeleteNode(node->split.leftChild->buddy);
9625 DeleteNode(node->split.leftChild);
9628 vma_delete(GetAllocationCallbacks(), node);
// Recursively checks one node's invariants (parent/buddy links, offsets) and
// accumulates counts into ctx for comparison by Validate(). NOTE(review):
// the switch header, break statements and final return were dropped by the
// extraction.
9631 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9633 VMA_VALIDATE(level < m_LevelCount);
9634 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy, and buddy links must be symmetric.
9635 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9636 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9639 case Node::TYPE_FREE:
9641 ctx.calculatedSumFreeSize += levelNodeSize;
9642 ++ctx.calculatedFreeCount;
9644 case Node::TYPE_ALLOCATION:
9645 ++ctx.calculatedAllocationCount;
// Internal fragmentation within the node counts as free space.
9646 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9647 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9649 case Node::TYPE_SPLIT:
9651 const uint32_t childrenLevel = level + 1;
9652 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
// Left child starts at the parent's offset, right child half a node later.
9653 const Node*
const leftChild = curr->split.leftChild;
9654 VMA_VALIDATE(leftChild != VMA_NULL);
9655 VMA_VALIDATE(leftChild->offset == curr->offset);
9656 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9658 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9660 const Node*
const rightChild = leftChild->buddy;
9661 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9662 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9664 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
9675 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9679 VkDeviceSize currLevelNodeSize = m_UsableSize;
9680 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9681 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9684 currLevelNodeSize = nextLevelNodeSize;
9685 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: descends from the root to the allocated
// leaf, marks it free, then merges it with its buddy repeatedly while the
// buddy is also free. NOTE(review): the `level` tracking increments and the
// merge-loop decrement were dropped by the extraction.
9690 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9693 Node* node = m_Root;
9694 VkDeviceSize nodeOffset = 0;
9696 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Binary descent: pick left or right child depending on which half of the
// current node contains `offset`.
9697 while(node->type == Node::TYPE_SPLIT)
9699 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9700 if(offset < nodeOffset + nextLevelSize)
9702 node = node->split.leftChild;
9706 node = node->split.leftChild->buddy;
9707 nodeOffset += nextLevelSize;
9710 levelNodeSize = nextLevelSize;
9713 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9714 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9717 --m_AllocationCount;
9718 m_SumFreeSize += alloc->GetSize();
9720 node->type = Node::TYPE_FREE;
// Merge with free buddies bottom-up: delete both children and turn the
// parent back into a free node.
9723 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9725 RemoveFromFreeList(level, node->buddy);
9726 Node*
const parent = node->parent;
9728 vma_delete(GetAllocationCallbacks(), node->buddy);
9729 vma_delete(GetAllocationCallbacks(), node);
9730 parent->type = Node::TYPE_FREE;
9738 AddToFreeListFront(level, node);
// Recursive statistics gathering for CalcAllocationStatInfo. NOTE(review):
// the outInfo field updates inside each case were dropped by the extraction.
9741 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9745 case Node::TYPE_FREE:
9751 case Node::TYPE_ALLOCATION:
9753 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation inside the node is reported as an unused range.
9759 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9760 if(unusedRangeSize > 0)
9769 case Node::TYPE_SPLIT:
9771 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9772 const Node*
const leftChild = node->split.leftChild;
9773 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9774 const Node*
const rightChild = leftChild->buddy;
9775 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
9783 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9785 VMA_ASSERT(node->type == Node::TYPE_FREE);
9788 Node*
const frontNode = m_FreeList[level].front;
9789 if(frontNode == VMA_NULL)
9791 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9792 node->free.prev = node->free.next = VMA_NULL;
9793 m_FreeList[level].front = m_FreeList[level].back = node;
9797 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9798 node->free.prev = VMA_NULL;
9799 node->free.next = frontNode;
9800 frontNode->free.prev = node;
9801 m_FreeList[level].front = node;
9805 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9807 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
9810 if(node->free.prev == VMA_NULL)
9812 VMA_ASSERT(m_FreeList[level].front == node);
9813 m_FreeList[level].front = node->free.next;
9817 Node*
const prevFreeNode = node->free.prev;
9818 VMA_ASSERT(prevFreeNode->free.next == node);
9819 prevFreeNode->free.next = node->free.next;
9823 if(node->free.next == VMA_NULL)
9825 VMA_ASSERT(m_FreeList[level].back == node);
9826 m_FreeList[level].back = node->free.prev;
9830 Node*
const nextFreeNode = node->free.next;
9831 VMA_ASSERT(nextFreeNode->free.prev == node);
9832 nextFreeNode->free.prev = node->free.prev;
// Recursively emits JSON for one buddy node: unused range, allocation (plus
// its internal fragmentation), or both children of a split node.
// NOTE(review): switch header and break statements were dropped by the
// extraction.
9836 #if VMA_STATS_STRING_ENABLED 9837 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9841 case Node::TYPE_FREE:
9842 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9844 case Node::TYPE_ALLOCATION:
9846 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9847 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the tail of the node not covered by the allocation.
9848 if(allocSize < levelNodeSize)
9850 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
9854 case Node::TYPE_SPLIT:
9856 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9857 const Node*
const leftChild = node->split.leftChild;
9858 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9859 const Node*
const rightChild = leftChild->buddy;
9860 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// VmaDeviceMemoryBlock constructor: all handles start null/invalid; real
// setup happens in Init(). NOTE(review): some initializers (e.g. map count,
// id) were dropped by the extraction.
9867 #endif // #if VMA_STATS_STRING_ENABLED 9873 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9874 m_pMetadata(VMA_NULL),
9875 m_MemoryTypeIndex(UINT32_MAX),
9877 m_hMemory(VK_NULL_HANDLE),
9879 m_pMappedData(VMA_NULL)
// Adopts a freshly allocated VkDeviceMemory and creates the metadata object
// matching the requested algorithm (linear / buddy / generic default).
// NOTE(review): remaining parameters and the algorithm switch header were
// dropped by the extraction.
9883 void VmaDeviceMemoryBlock::Init(
9885 uint32_t newMemoryTypeIndex,
9886 VkDeviceMemory newMemory,
9887 VkDeviceSize newSize,
9891 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9893 m_MemoryTypeIndex = newMemoryTypeIndex;
9895 m_hMemory = newMemory;
9900 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9903 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
9909 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9911 m_pMetadata->Init(newSize);
9914 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9918 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9920 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9921 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9922 m_hMemory = VK_NULL_HANDLE;
9924 vma_delete(allocator, m_pMetadata);
9925 m_pMetadata = VMA_NULL;
9928 bool VmaDeviceMemoryBlock::Validate()
const 9930 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
9931 (m_pMetadata->GetSize() != 0));
9933 return m_pMetadata->Validate();
9936 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9938 void* pData =
nullptr;
9939 VkResult res = Map(hAllocator, 1, &pData);
9940 if(res != VK_SUCCESS)
9945 res = m_pMetadata->CheckCorruption(pData);
9947 Unmap(hAllocator, 1);
// Reference-counted mapping of the block's memory. If already mapped, just
// bumps the counter and returns the cached pointer; otherwise calls
// vkMapMemory. NOTE(review): the count==0 early-out, the "already mapped"
// branch header, the remaining vkMapMemory arguments and the return
// statements were dropped by the extraction.
9952 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9959 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse the cached pointer.
9962 m_MapCount += count;
9963 VMA_ASSERT(m_pMappedData != VMA_NULL);
9964 if(ppData != VMA_NULL)
9966 *ppData = m_pMappedData;
// First mapping: map the whole block persistently.
9972 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9973 hAllocator->m_hDevice,
9979 if(result == VK_SUCCESS)
9981 if(ppData != VMA_NULL)
9983 *ppData = m_pMappedData;
// Reference-counted unmapping; only the final Unmap actually calls
// vkUnmapMemory. NOTE(review): the count==0 early-out and else branch header
// were dropped by the extraction.
9991 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9998 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9999 if(m_MapCount >= count)
10001 m_MapCount -= count;
10002 if(m_MapCount == 0)
10004 m_pMappedData = VMA_NULL;
10005 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10010 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
10014 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10016 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10017 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10020 VkResult res = Map(hAllocator, 1, &pData);
10021 if(res != VK_SUCCESS)
10026 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10027 VmaWriteMagicValue(pData, allocOffset + allocSize);
10029 Unmap(hAllocator, 1);
10034 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10036 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10037 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10040 VkResult res = Map(hAllocator, 1, &pData);
10041 if(res != VK_SUCCESS)
10046 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10048 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10050 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10052 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10055 Unmap(hAllocator, 1);
// Binds a buffer to this block's memory at the allocation's offset, under the
// block mutex so parallel binds to one VkDeviceMemory are serialized.
// NOTE(review): the hAllocator/hAllocation/hBuffer parameter lines were
// dropped by the extraction.
10060 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10065 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10066 hAllocation->GetBlock() ==
this);
// This lock is important so that we don't call vkBind... simultaneously on
// the same VkDeviceMemory from multiple threads.
10068 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10069 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10070 hAllocator->m_hDevice,
10073 hAllocation->GetOffset());
// Same as above, for images.
10076 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10081 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10082 hAllocation->GetBlock() ==
this);
10084 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10085 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10086 hAllocator->m_hDevice,
10089 hAllocation->GetOffset());
// Fragment of a stat-initialization helper (its header was lost in
// extraction): zeroes the output statistics struct.
10094 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of aggregated statistics (e.g. averages) — body not
// visible here.
10113 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Custom pool constructor: forwards the create-info fields into the embedded
// VmaBlockVector; a zero blockSize means "use the allocator's preferred
// size". NOTE(review): some parameters/initializers were dropped by the
// extraction.
10121 VmaPool_T::VmaPool_T(
10124 VkDeviceSize preferredBlockSize) :
10127 createInfo.memoryTypeIndex,
10128 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10129 createInfo.minBlockCount,
10130 createInfo.maxBlockCount,
10132 createInfo.frameInUseCount,
// explicitBlockSize: true when the user pinned an exact block size.
10134 createInfo.blockSize != 0,
10140 VmaPool_T::~VmaPool_T()
// VmaBlockVector: a growable set of VkDeviceMemory blocks for one memory
// type, shared by the default pools and custom pools. Constructor just
// copies configuration; no Vulkan calls happen here.
10144 #if VMA_STATS_STRING_ENABLED 10146 #endif // #if VMA_STATS_STRING_ENABLED 10148 VmaBlockVector::VmaBlockVector(
10150 uint32_t memoryTypeIndex,
10151 VkDeviceSize preferredBlockSize,
10152 size_t minBlockCount,
10153 size_t maxBlockCount,
10154 VkDeviceSize bufferImageGranularity,
10155 uint32_t frameInUseCount,
10157 bool explicitBlockSize,
10158 uint32_t algorithm) :
10159 m_hAllocator(hAllocator),
10160 m_MemoryTypeIndex(memoryTypeIndex),
10161 m_PreferredBlockSize(preferredBlockSize),
10162 m_MinBlockCount(minBlockCount),
10163 m_MaxBlockCount(maxBlockCount),
10164 m_BufferImageGranularity(bufferImageGranularity),
10165 m_FrameInUseCount(frameInUseCount),
10166 m_IsCustomPool(isCustomPool),
10167 m_ExplicitBlockSize(explicitBlockSize),
10168 m_Algorithm(algorithm),
10169 m_HasEmptyBlock(false),
10170 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10171 m_pDefragmentator(VMA_NULL),
10176 VmaBlockVector::~VmaBlockVector()
10178 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10180 for(
size_t i = m_Blocks.size(); i--; )
10182 m_Blocks[i]->Destroy(m_hAllocator);
10183 vma_delete(m_hAllocator, m_Blocks[i]);
10187 VkResult VmaBlockVector::CreateMinBlocks()
10189 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10191 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10192 if(res != VK_SUCCESS)
// Aggregates statistics over all blocks into *pStats, under the vector
// mutex. NOTE(review): the lines zeroing the pStats fields before the loop
// were dropped by the extraction.
10200 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10202 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10204 const size_t blockCount = m_Blocks.size();
10213 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10215 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10216 VMA_ASSERT(pBlock);
10217 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block's metadata adds its own size/unused/allocation counts.
10218 pBlock->m_pMetadata->AddPoolStats(*pStats);
10222 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10224 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10225 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10226 (VMA_DEBUG_MARGIN > 0) &&
10227 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
10230 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Core block-vector allocation: tries the last block, then all existing
// blocks (forward for linear algorithm locality, backward otherwise), then
// creates a new block with progressively halved size, and finally — if
// allowed — retries while making stale allocations "lost". NOTE(review):
// many lines (parameters, braces, AllocateFromBlock argument lists, returns)
// were dropped by the extraction; tokens below are verbatim.
10232 VkResult VmaBlockVector::Allocate(
10234 uint32_t currentFrameIndex,
10236 VkDeviceSize alignment,
10238 VmaSuballocationType suballocType,
10245 const bool canCreateNewBlock =
10247 (m_Blocks.size() < m_MaxBlockCount);
10254 canMakeOtherLost =
false;
// Upper-address allocations are only meaningful for the linear algorithm.
10258 if(isUpperAddress &&
10261 return VK_ERROR_FEATURE_NOT_PRESENT;
10275 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early reject: request (plus both debug margins) can never fit in a block.
10279 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10281 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10284 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10291 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Try the most recently used (last) block first.
10300 if(!m_Blocks.empty())
10302 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10303 VMA_ASSERT(pCurrBlock);
10304 VkResult res = AllocateFromBlock(
10315 if(res == VK_SUCCESS)
10317 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 2a. Forward scan over existing blocks (linear algorithm path).
10327 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10329 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10330 VMA_ASSERT(pCurrBlock);
10331 VkResult res = AllocateFromBlock(
10342 if(res == VK_SUCCESS)
10344 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2b. Backward scan over existing blocks (default path).
10352 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10354 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10355 VMA_ASSERT(pCurrBlock);
10356 VkResult res = AllocateFromBlock(
10367 if(res == VK_SUCCESS)
10369 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 3. Create a new block.
10377 if(canCreateNewBlock)
10380 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10381 uint32_t newBlockSizeShift = 0;
10382 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10384 if(!m_ExplicitBlockSize)
// Heuristic: start with a smaller block if no block this large exists yet
// and the request would still fit with room to spare.
10387 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10388 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10390 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10391 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10393 newBlockSize = smallerNewBlockSize;
10394 ++newBlockSizeShift;
10403 size_t newBlockIndex = 0;
10404 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On out-of-memory, retry with progressively halved block sizes.
10406 if(!m_ExplicitBlockSize)
10408 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10410 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10411 if(smallerNewBlockSize >= size)
10413 newBlockSize = smallerNewBlockSize;
10414 ++newBlockSizeShift;
10415 res = CreateBlock(newBlockSize, &newBlockIndex);
10424 if(res == VK_SUCCESS)
10426 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10427 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10429 res = AllocateFromBlock(
10440 if(res == VK_SUCCESS)
10442 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
10448 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 4. Last resort: make other (stale) allocations lost to free up space.
10455 if(canMakeOtherLost)
10457 uint32_t tryIndex = 0;
10458 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10460 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10461 VmaAllocationRequest bestRequest = {};
10462 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan variant: pick the request with the lowest cost (fewest/
// smallest allocations to sacrifice).
10468 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10470 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10471 VMA_ASSERT(pCurrBlock);
10472 VmaAllocationRequest currRequest = {};
10473 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10476 m_BufferImageGranularity,
10485 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10486 if(pBestRequestBlock == VMA_NULL ||
10487 currRequestCost < bestRequestCost)
10489 pBestRequestBlock = pCurrBlock;
10490 bestRequest = currRequest;
10491 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — cannot do better.
10493 if(bestRequestCost == 0)
// Backward scan variant of the same search.
10504 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10506 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10507 VMA_ASSERT(pCurrBlock);
10508 VmaAllocationRequest currRequest = {};
10509 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10512 m_BufferImageGranularity,
10521 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10522 if(pBestRequestBlock == VMA_NULL ||
10523 currRequestCost < bestRequestCost ||
10526 pBestRequestBlock = pCurrBlock;
10527 bestRequest = currRequest;
10528 bestRequestCost = currRequestCost;
10530 if(bestRequestCost == 0 ||
10540 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations need the block mapped up front.
10544 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10545 if(res != VK_SUCCESS)
// The chosen allocations may have been touched by another thread meanwhile;
// only commit if they can still be made lost.
10551 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10557 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10559 m_HasEmptyBlock =
false;
// Commit: create the allocation object and register it in the metadata.
10562 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10563 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10564 (*pAllocation)->InitBlockAllocation(
10567 bestRequest.offset,
10573 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10574 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10575 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10576 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10578 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10580 if(IsCorruptionDetectionEnabled())
10582 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10583 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Gave up after too many make-lost retries.
10598 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10600 return VK_ERROR_TOO_MANY_OBJECTS;
10604 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block and possibly retires one empty block.
// At most one fully-empty block is kept around (m_HasEmptyBlock); a second
// empty block gets destroyed outside the mutex. NOTE(review): several
// braces/else lines were dropped by the extraction.
10607 void VmaBlockVector::Free(
10610 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope for lock.
10614 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10616 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10618 if(IsCorruptionDetectionEnabled())
10620 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10621 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Drop the persistent mapping reference held by this allocation.
10624 if(hAllocation->IsPersistentMap())
10626 pBlock->Unmap(m_hAllocator, 1);
10629 pBlock->m_pMetadata->Free(hAllocation);
10630 VMA_HEAVY_ASSERT(pBlock->Validate());
10632 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: delete it only if another empty block already exists
// and we are above the minimum block count; otherwise remember it as the
// single cached empty block.
10635 if(pBlock->m_pMetadata->IsEmpty())
10638 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10640 pBlockToDelete = pBlock;
10646 m_HasEmptyBlock =
true;
// Block stayed non-empty but an empty one is cached: if the last (sorted)
// block is empty and above the minimum, retire it instead.
10651 else if(m_HasEmptyBlock)
10653 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10654 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10656 pBlockToDelete = pLastBlock;
10657 m_Blocks.pop_back();
10658 m_HasEmptyBlock =
false;
10662 IncrementallySortBlocks();
// Destruction of a totally empty block is deferred until after the mutex is
// released.
10667 if(pBlockToDelete != VMA_NULL)
10669 VMA_DEBUG_LOG(
" Deleted empty allocation");
10670 pBlockToDelete->Destroy(m_hAllocator);
10671 vma_delete(m_hAllocator, pBlockToDelete);
10675 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10677 VkDeviceSize result = 0;
10678 for(
size_t i = m_Blocks.size(); i--; )
10680 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10681 if(result >= m_PreferredBlockSize)
10689 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10691 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10693 if(m_Blocks[blockIndex] == pBlock)
10695 VmaVectorRemove(m_Blocks, blockIndex);
10702 void VmaBlockVector::IncrementallySortBlocks()
10707 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10709 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10711 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to carve the requested allocation out of one specific block
// without making anything lost; on success creates and initializes the
// VmaAllocation object. NOTE(review): several parameter lines, argument
// lists and the success-return were dropped by the extraction.
10718 VkResult VmaBlockVector::AllocateFromBlock(
10719 VmaDeviceMemoryBlock* pBlock,
10721 uint32_t currentFrameIndex,
10723 VkDeviceSize alignment,
10726 VmaSuballocationType suballocType,
10735 VmaAllocationRequest currRequest = {};
10736 if(pBlock->m_pMetadata->CreateAllocationRequest(
10739 m_BufferImageGranularity,
// canMakeOtherLost is false on this path, so nothing may be sacrificed.
10749 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the block mapped for their lifetime.
10753 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10754 if(res != VK_SUCCESS)
10761 if(pBlock->m_pMetadata->IsEmpty())
10763 m_HasEmptyBlock =
false;
10766 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10767 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10768 (*pAllocation)->InitBlockAllocation(
10771 currRequest.offset,
10777 VMA_HEAVY_ASSERT(pBlock->Validate());
10778 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10779 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10781 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10783 if(IsCorruptionDetectionEnabled())
10785 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10786 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10790 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize, wraps it in a
// VmaDeviceMemoryBlock and appends it to the vector; optionally reports the
// new index. NOTE(review): the failure check after AllocateVulkanMemory and
// the pBlock->Init argument list were dropped by the extraction.
10793 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10795 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10796 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10797 allocInfo.allocationSize = blockSize;
10798 VkDeviceMemory mem = VK_NULL_HANDLE;
10799 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
10808 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10813 allocInfo.allocationSize,
10817 m_Blocks.push_back(pBlock);
10818 if(pNewBlockIndex != VMA_NULL)
10820 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as a JSON object: configuration (memory type,
// block size limits, algorithm) followed by a per-block detailed map, keyed by
// block id. Takes the vector mutex for the duration of the dump.
10826 #if VMA_STATS_STRING_ENABLED 10828 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
10830 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10832 json.BeginObject();
10836 json.WriteString(
"MemoryTypeIndex");
10837 json.WriteNumber(m_MemoryTypeIndex);
10839 json.WriteString(
"BlockSize");
10840 json.WriteNumber(m_PreferredBlockSize);
10842 json.WriteString(
"BlockCount");
10843 json.BeginObject(
true);
// Min/Max are only emitted when they differ from the unconstrained defaults.
10844 if(m_MinBlockCount > 0)
10846 json.WriteString(
"Min");
10847 json.WriteNumber((uint64_t)m_MinBlockCount);
10849 if(m_MaxBlockCount < SIZE_MAX)
10851 json.WriteString(
"Max");
10852 json.WriteNumber((uint64_t)m_MaxBlockCount);
10854 json.WriteString(
"Cur");
10855 json.WriteNumber((uint64_t)m_Blocks.size());
10858 if(m_FrameInUseCount > 0)
10860 json.WriteString(
"FrameInUseCount");
10861 json.WriteNumber(m_FrameInUseCount);
// 0 means the default allocation algorithm; only named algorithms are printed.
10864 if(m_Algorithm != 0)
10866 json.WriteString(
"Algorithm");
10867 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
10872 json.WriteString(
"PreferredBlockSize");
10873 json.WriteNumber(m_PreferredBlockSize);
10876 json.WriteString(
"Blocks");
10877 json.BeginObject();
// Each block is emitted under its numeric id as the JSON key.
10878 for(
size_t i = 0; i < m_Blocks.size(); ++i)
10880 json.BeginString();
10881 json.ContinueString(m_Blocks[i]->GetId());
10884 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector on first use and
// returns it; subsequent calls return the cached instance.
10891 #endif // #if VMA_STATS_STRING_ENABLED 10893 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
10895 uint32_t currentFrameIndex)
10897 if(m_pDefragmentator == VMA_NULL)
10899 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
10902 currentFrameIndex);
10905 return m_pDefragmentator;
// Runs the defragmentator against this vector under the vector mutex, reports
// moved bytes/allocations into pDefragmentationStats, decrements the caller's
// remaining budgets, then frees trailing empty blocks above m_MinBlockCount.
// NOTE(review): extraction-garbled — some lines are missing in this span.
10908 VkResult VmaBlockVector::Defragment(
10910 VkDeviceSize& maxBytesToMove,
10911 uint32_t& maxAllocationsToMove)
// No defragmentator was ever requested for this vector: nothing to do.
10913 if(m_pDefragmentator == VMA_NULL)
10918 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex)
10921 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
10924 if(pDefragmentationStats != VMA_NULL)
10926 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
10927 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
10928 pDefragmentationStats->
bytesMoved += bytesMoved;
// The defragmentator must never exceed the budgets it was given.
10930 VMA_ASSERT(bytesMoved <= maxBytesToMove);
10931 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
10932 maxBytesToMove -= bytesMoved;
10933 maxAllocationsToMove -= allocationsMoved;
// Recompute the empty-block flag while destroying surplus empty blocks.
10937 m_HasEmptyBlock =
false;
// Iterate backwards so VmaVectorRemove does not disturb unvisited indices.
10938 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10940 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
10941 if(pBlock->m_pMetadata->IsEmpty())
// Only destroy empty blocks while staying at or above the configured minimum.
10943 if(m_Blocks.size() > m_MinBlockCount)
10945 if(pDefragmentationStats != VMA_NULL)
10948 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
10951 VmaVectorRemove(m_Blocks, blockIndex);
10952 pBlock->Destroy(m_hAllocator);
10953 vma_delete(m_hAllocator, pBlock);
// An empty block was kept (minimum reached): remember that one exists.
10957 m_HasEmptyBlock =
true;
10965 void VmaBlockVector::DestroyDefragmentator()
10967 if(m_pDefragmentator != VMA_NULL)
10969 vma_delete(m_hAllocator, m_pDefragmentator);
10970 m_pDefragmentator = VMA_NULL;
// Marks allocations in every block as lost (based on currentFrameIndex and
// m_FrameInUseCount) and optionally reports the total number made lost.
10974 void VmaBlockVector::MakePoolAllocationsLost(
10975 uint32_t currentFrameIndex,
10976 size_t* pLostAllocationCount)
10978 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10979 size_t lostAllocationCount = 0;
10980 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10982 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10983 VMA_ASSERT(pBlock);
10984 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out-parameter is optional.
10986 if(pLostAllocationCount != VMA_NULL)
10988 *pLostAllocationCount = lostAllocationCount;
// Validates the magic values around every allocation in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled for
// this vector; otherwise propagates the first per-block failure.
10992 VkResult VmaBlockVector::CheckCorruption()
10994 if(!IsCorruptionDetectionEnabled())
10996 return VK_ERROR_FEATURE_NOT_PRESENT;
10999 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11000 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11002 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11003 VMA_ASSERT(pBlock);
11004 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11005 if(res != VK_SUCCESS)
// Accumulates per-block allocation statistics into pStats, folding each
// block's stat-info into the global total, the per-memory-type bucket, and
// the per-heap bucket for this vector's memory type.
11013 void VmaBlockVector::AddStats(
VmaStats* pStats)
11015 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11016 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11018 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11020 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11022 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11023 VMA_ASSERT(pBlock);
11024 VMA_HEAVY_ASSERT(pBlock->Validate());
11026 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11027 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11028 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11029 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: captures the allocator, target block vector and frame index;
// initializes move counters to zero and binds both internal vectors to the
// allocator's allocation callbacks. Defragmentation only supports the default
// algorithm (GetAlgorithm() == 0), asserted here.
11036 VmaDefragmentator::VmaDefragmentator(
11038 VmaBlockVector* pBlockVector,
11039 uint32_t currentFrameIndex) :
11040 m_hAllocator(hAllocator),
11041 m_pBlockVector(pBlockVector),
11042 m_CurrentFrameIndex(currentFrameIndex),
11044 m_AllocationsMoved(0),
11045 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11046 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11048 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: deletes every per-block BlockInfo owned by this defragmentator.
11051 VmaDefragmentator::~VmaDefragmentator()
11053 for(
size_t i = m_Blocks.size(); i--; )
11055 vma_delete(m_hAllocator, m_Blocks[i]);
11059 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11061 AllocationInfo allocInfo;
11062 allocInfo.m_hAllocation = hAlloc;
11063 allocInfo.m_pChanged = pChanged;
11064 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, reusing either a mapping this
// defragmentation already created or the block's persistent mapping; maps the
// block itself only as a last resort.
11067 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
// Already mapped by this defragmentation pass.
11070 if(m_pMappedDataForDefragmentation)
11072 *ppMappedData = m_pMappedDataForDefragmentation;
// The block is persistently mapped by someone else — reuse that mapping.
11077 if(m_pBlock->GetMappedData())
11079 *ppMappedData = m_pBlock->GetMappedData();
// Map it now; the mapping is remembered so Unmap() can undo it later.
11084 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11085 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes the mapping created by EnsureMapping(), if any. Mappings that were
// merely borrowed (the block's persistent mapping) are left untouched.
11089 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11091 if(m_pMappedDataForDefragmentation != VMA_NULL)
11093 m_pBlock->Unmap(hAllocator, 1);
// One pass of defragmentation: walks candidate allocations from the last block
// backwards and tries to move each into an earlier block (or earlier offset)
// via memcpy between mapped blocks, respecting the byte/count budgets.
// Returns VK_INCOMPLETE when a budget would be exceeded.
// NOTE(review): extraction-garbled — loop scaffolding lines are missing here.
11097 VkResult VmaDefragmentator::DefragmentRound(
11098 VkDeviceSize maxBytesToMove,
11099 uint32_t maxAllocationsToMove)
11101 if(m_Blocks.empty())
// Cursor starts at the last block; SIZE_MAX forces selection of the last
// allocation in that block on the first iteration.
11106 size_t srcBlockIndex = m_Blocks.size() - 1;
11107 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor to the next block that still has candidate allocations.
11113 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11115 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11118 if(srcBlockIndex == 0)
11125 srcAllocIndex = SIZE_MAX;
11130 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11134 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11135 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11137 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11138 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11139 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11140 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
11143 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11145 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11146 VmaAllocationRequest dstAllocRequest;
11147 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11148 m_CurrentFrameIndex,
11149 m_pBlockVector->GetFrameInUseCount(),
11150 m_pBlockVector->GetBufferImageGranularity(),
11157 &dstAllocRequest) &&
// MoveMakesSense rejects moves that would not compact (same or later position).
11159 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11161 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop before exceeding either budget; caller may run another round later.
11164 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11165 (m_BytesMoved + size > maxBytesToMove))
11167 return VK_INCOMPLETE;
11170 void* pDstMappedData = VMA_NULL;
11171 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11172 if(res != VK_SUCCESS)
11177 void* pSrcMappedData = VMA_NULL;
11178 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11179 if(res != VK_SUCCESS)
// Raw byte copy of the allocation contents between the two mapped blocks.
11186 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11187 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11188 static_cast<size_t>(size));
// Re-arm corruption-detection margins at the new location.
11190 if(VMA_DEBUG_MARGIN > 0)
11192 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11193 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate at destination, free at source, repoint the allocation.
11196 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11201 allocInfo.m_hAllocation);
11202 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11204 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11206 if(allocInfo.m_pChanged != VMA_NULL)
11208 *allocInfo.m_pChanged = VK_TRUE;
11211 ++m_AllocationsMoved;
11212 m_BytesMoved += size;
11214 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Move the cursor to the previous allocation / previous block.
11222 if(srcAllocIndex > 0)
11228 if(srcBlockIndex > 0)
11231 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block BlockInfo entries,
// distributes registered (non-lost) allocations into their owning blocks,
// sorts blocks/allocations into move-friendly order, runs up to two rounds of
// DefragmentRound, then unmaps everything that was mapped along the way.
11241 VkResult VmaDefragmentator::Defragment(
11242 VkDeviceSize maxBytesToMove,
11243 uint32_t maxAllocationsToMove)
// Nothing registered via AddAllocation: nothing to do.
11245 if(m_Allocations.empty())
// Create one BlockInfo per block of the target vector.
11251 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11252 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11254 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11255 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11256 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
11260 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
11263 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11265 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped — they no longer occupy memory to move.
11267 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11269 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11270 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11271 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11273 (*it)->m_Allocations.push_back(allocInfo);
11281 m_Allocations.clear();
11283 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11285 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11286 pBlockInfo->CalcHasNonMovableAllocations();
11287 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred move destinations come first after this sort.
11291 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds, stopping early on VK_INCOMPLETE or failure.
11294 VkResult result = VK_SUCCESS;
11295 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11297 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings EnsureMapping created during the rounds.
11301 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11303 m_Blocks[blockIndex]->Unmap(m_hAllocator);
11309 bool VmaDefragmentator::MoveMakesSense(
11310 size_t dstBlockIndex, VkDeviceSize dstOffset,
11311 size_t srcBlockIndex, VkDeviceSize srcOffset)
11313 if(dstBlockIndex < srcBlockIndex)
11317 if(dstBlockIndex > srcBlockIndex)
11321 if(dstOffset < srcOffset)
// VmaRecorder constructor and (fragment of) its Init: captures mutex usage and
// flags, starts the QueryPerformance clock, opens the recording file with
// fopen_s, and writes the CSV header and format version line.
// NOTE(review): the Init signature line is missing from this garbled span.
11331 #if VMA_RECORDING_ENABLED 11333 VmaRecorder::VmaRecorder() :
11338 m_StartCounter(INT64_MAX)
11344 m_UseMutex = useMutex;
11345 m_Flags = settings.
flags;
// Timestamps in the recording are seconds relative to this starting counter.
11347 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11348 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
11351 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11354 return VK_ERROR_INITIALIZATION_FAILED;
// File header: magic line plus recording format version "1,3".
11358 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11359 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the recording file if Init opened one.
11364 VmaRecorder::~VmaRecorder()
11366 if(m_File != VMA_NULL)
// Appends a "vmaCreateAllocator" CSV line (threadId,time,frameIndex) under the
// file mutex.
11372 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11374 CallParams callParams;
11375 GetBasicParams(callParams);
11377 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11378 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a "vmaDestroyAllocator" CSV line under the file mutex.
11382 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11384 CallParams callParams;
11385 GetBasicParams(callParams);
11387 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11388 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body fragment of RecordCreatePool: logs a "vmaCreatePool" CSV line with the
// pool create-info fields. NOTE(review): the signature line is missing from
// this garbled span.
11394 CallParams callParams;
11395 GetBasicParams(callParams);
11397 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11398 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyPool" CSV line identifying the pool by pointer.
11409 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11411 CallParams callParams;
11412 GetBasicParams(callParams);
11414 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11415 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaAllocateMemory" CSV line: memory requirements, create-info
// fields, and the (possibly stringified) user data.
11420 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11421 const VkMemoryRequirements& vkMemReq,
11425 CallParams callParams;
11426 GetBasicParams(callParams);
11428 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11429 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11430 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11432 vkMemReq.alignment,
11433 vkMemReq.memoryTypeBits,
11441 userDataStr.GetString());
// Appends a "vmaAllocateMemoryForBuffer" CSV line, including the dedicated-
// allocation hints as 0/1 flags.
11445 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11446 const VkMemoryRequirements& vkMemReq,
11447 bool requiresDedicatedAllocation,
11448 bool prefersDedicatedAllocation,
11452 CallParams callParams;
11453 GetBasicParams(callParams);
11455 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11456 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11457 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11459 vkMemReq.alignment,
11460 vkMemReq.memoryTypeBits,
11461 requiresDedicatedAllocation ? 1 : 0,
11462 prefersDedicatedAllocation ? 1 : 0,
11470 userDataStr.GetString());
// Appends a "vmaAllocateMemoryForImage" CSV line, mirroring the buffer variant.
11474 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11475 const VkMemoryRequirements& vkMemReq,
11476 bool requiresDedicatedAllocation,
11477 bool prefersDedicatedAllocation,
11481 CallParams callParams;
11482 GetBasicParams(callParams);
11484 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11485 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11486 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11488 vkMemReq.alignment,
11489 vkMemReq.memoryTypeBits,
11490 requiresDedicatedAllocation ? 1 : 0,
11491 prefersDedicatedAllocation ? 1 : 0,
11499 userDataStr.GetString());
// Appends a "vmaFreeMemory" CSV line identifying the allocation by pointer.
11503 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11506 CallParams callParams;
11507 GetBasicParams(callParams);
11509 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11510 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaSetAllocationUserData" CSV line with the allocation pointer
// and the new user data (stringified by UserDataString).
11515 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11517 const void* pUserData)
11519 CallParams callParams;
11520 GetBasicParams(callParams);
11522 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11523 UserDataString userDataStr(
11526 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11528 userDataStr.GetString());
// Appends a "vmaCreateLostAllocation" CSV line.
11532 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11535 CallParams callParams;
11536 GetBasicParams(callParams);
11538 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11539 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMapMemory" CSV line.
11544 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11547 CallParams callParams;
11548 GetBasicParams(callParams);
11550 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11551 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaUnmapMemory" CSV line.
11556 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11559 CallParams callParams;
11560 GetBasicParams(callParams);
11562 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11563 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFlushAllocation" CSV line with allocation, offset and size.
11568 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11569 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11571 CallParams callParams;
11572 GetBasicParams(callParams);
11574 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11575 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaInvalidateAllocation" CSV line with allocation, offset, size.
11582 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11583 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11585 CallParams callParams;
11586 GetBasicParams(callParams);
11588 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11589 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaCreateBuffer" CSV line: buffer create-info fields followed by
// the allocation create-info fields and user data.
11596 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11597 const VkBufferCreateInfo& bufCreateInfo,
11601 CallParams callParams;
11602 GetBasicParams(callParams);
11604 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11605 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11606 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11607 bufCreateInfo.flags,
11608 bufCreateInfo.size,
11609 bufCreateInfo.usage,
11610 bufCreateInfo.sharingMode,
11611 allocCreateInfo.
flags,
11612 allocCreateInfo.
usage,
11616 allocCreateInfo.
pool,
11618 userDataStr.GetString());
// Appends a "vmaCreateImage" CSV line: the full image create-info (type,
// format, extent, mips, layers, samples, tiling, usage, sharing, layout)
// followed by the allocation create-info fields and user data.
11622 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11623 const VkImageCreateInfo& imageCreateInfo,
11627 CallParams callParams;
11628 GetBasicParams(callParams);
11630 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11631 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11632 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11633 imageCreateInfo.flags,
11634 imageCreateInfo.imageType,
11635 imageCreateInfo.format,
11636 imageCreateInfo.extent.width,
11637 imageCreateInfo.extent.height,
11638 imageCreateInfo.extent.depth,
11639 imageCreateInfo.mipLevels,
11640 imageCreateInfo.arrayLayers,
11641 imageCreateInfo.samples,
11642 imageCreateInfo.tiling,
11643 imageCreateInfo.usage,
11644 imageCreateInfo.sharingMode,
11645 imageCreateInfo.initialLayout,
11646 allocCreateInfo.
flags,
11647 allocCreateInfo.
usage,
11651 allocCreateInfo.
pool,
11653 userDataStr.GetString());
// Appends a "vmaDestroyBuffer" CSV line.
11657 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11660 CallParams callParams;
11661 GetBasicParams(callParams);
11663 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11664 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyImage" CSV line.
11669 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11672 CallParams callParams;
11673 GetBasicParams(callParams);
11675 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11676 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaTouchAllocation" CSV line.
11681 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11684 CallParams callParams;
11685 GetBasicParams(callParams);
11687 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11688 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaGetAllocationInfo" CSV line.
11693 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11696 CallParams callParams;
11697 GetBasicParams(callParams);
11699 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11700 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMakePoolAllocationsLost" CSV line.
11705 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11708 CallParams callParams;
11709 GetBasicParams(callParams);
11711 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11712 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of the UserDataString constructor: when user data is a string it is
// used directly, otherwise the pointer value is formatted into m_PtrStr.
// NOTE(review): the constructor signature line is missing from this span.
11719 if(pUserData != VMA_NULL)
11723 m_Str = (
const char*)pUserData;
11727 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" .. "Config,End" section of the recording: physical
// device identity and limits, every memory heap and type, whether the
// dedicated-allocation extension is enabled, and the values of all VMA debug
// macros the recording's replayer needs to reproduce behavior.
11737 void VmaRecorder::WriteConfiguration(
11738 const VkPhysicalDeviceProperties& devProps,
11739 const VkPhysicalDeviceMemoryProperties& memProps,
11740 bool dedicatedAllocationExtensionEnabled)
11742 fprintf(m_File,
"Config,Begin\n");
11744 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11745 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11746 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11747 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11748 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11749 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11751 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11752 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11753 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Per-heap sizes and flags.
11755 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11756 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11758 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11759 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Per-memory-type heap index and property flags.
11761 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11762 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11764 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11765 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11768 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time VMA configuration that affects replay fidelity.
11770 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11771 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11772 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11773 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11774 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11775 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11776 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11777 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11778 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11780 fprintf(m_File,
"Config,End\n");
// Fills the common CSV columns: calling thread id (Win32) and elapsed time in
// seconds since Init, derived from the QueryPerformance counter/frequency.
11783 void VmaRecorder::GetBasicParams(CallParams& outParams)
11785 outParams.threadId = GetCurrentThreadId();
11787 LARGE_INTEGER counter;
11788 QueryPerformanceCounter(&counter);
11789 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flushes the recording file according to the recorder's flush settings.
// NOTE(review): the body is missing from this garbled span.
11792 void VmaRecorder::Flush()
// VmaAllocator_T constructor (fragment — the signature and parts of the
// initializer list are missing from this garbled span). Copies the creation
// parameters, imports Vulkan function pointers, queries device/memory
// properties, applies optional per-heap size limits, creates one block vector
// and one dedicated-allocation list per memory type, and optionally starts the
// call recorder.
11800 #endif // #if VMA_RECORDING_ENABLED 11808 m_hDevice(pCreateInfo->device),
11809 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11810 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11811 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11812 m_PreferredLargeHeapBlockSize(0),
11813 m_PhysicalDevice(pCreateInfo->physicalDevice),
11814 m_CurrentFrameIndex(0),
11815 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11818 ,m_pRecorder(VMA_NULL)
// Corruption detection needs margins that hold whole uint32_t magic values.
11821 if(VMA_DEBUG_DETECT_CORRUPTION)
11824 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
11829 #if !(VMA_DEDICATED_ALLOCATION) 11832 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
11836 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11837 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11838 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11840 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11841 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Heaps are unlimited unless pHeapSizeLimit overrides them below.
11843 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11845 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
11856 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11857 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity checks: alignments/granularities must be powers of two.
11859 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11860 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11861 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11862 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply caller-provided per-heap limits, also clamping the reported heap size.
11869 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
11871 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
11872 if(limit != VK_WHOLE_SIZE)
11874 m_HeapSizeLimit[heapIndex] = limit;
11875 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
11877 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One block vector + one dedicated-allocation vector per memory type.
11883 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11885 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
11887 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11890 preferredBlockSize,
11893 GetBufferImageGranularity(),
11900 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
11907 VkResult res = VK_SUCCESS;
// Optional call recording; fails hard when recording was requested but the
// library was built without VMA_RECORDING_ENABLED.
11912 #if VMA_RECORDING_ENABLED 11913 m_pRecorder = vma_new(
this, VmaRecorder)();
11915 if(res != VK_SUCCESS)
11919 m_pRecorder->WriteConfiguration(
11920 m_PhysicalDeviceProperties,
11922 m_UseKhrDedicatedAllocation);
11923 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
11925 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
11926 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records allocator destruction and deletes the recorder (when
// recording is compiled in), then destroys the per-memory-type dedicated
// allocation lists and block vectors. All user pools must be gone by now.
11933 VmaAllocator_T::~VmaAllocator_T()
11935 #if VMA_RECORDING_ENABLED 11936 if(m_pRecorder != VMA_NULL)
11938 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
11939 vma_delete(
this, m_pRecorder);
11943 VMA_ASSERT(m_Pools.empty());
11945 for(
size_t i = GetMemoryTypeCount(); i--; )
11947 vma_delete(
this, m_pDedicatedAllocations[i]);
11948 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions: first from statically linked Vulkan prototypes
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1), fetching the KHR dedicated-
// allocation entry points via vkGetDeviceProcAddr; then overrides with any
// non-null pointers the user supplied; finally asserts every required pointer
// is set.
11952 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
11954 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11955 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
11956 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
11957 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
11958 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
11959 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
11960 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
11961 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
11962 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
11963 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
11964 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
11965 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
11966 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
11967 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
11968 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
11969 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
11970 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points have no static prototypes — resolve them dynamically.
11971 #if VMA_DEDICATED_ALLOCATION 11972 if(m_UseKhrDedicatedAllocation)
11974 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
11975 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11976 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11977 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers take precedence over the statically resolved ones.
11979 #endif // #if VMA_DEDICATED_ALLOCATION 11980 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11982 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11983 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11985 if(pVulkanFunctions != VMA_NULL)
11987 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11988 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11989 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11990 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11991 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11992 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11993 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11994 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11995 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11996 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11997 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11998 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11999 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12000 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12001 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12002 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
12003 #if VMA_DEDICATED_ALLOCATION 12004 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
12005 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function pointer must be non-null.
12009 #undef VMA_COPY_IF_NOT_NULL 12013 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12014 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12015 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12016 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12017 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12018 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12019 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12020 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12021 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12022 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12023 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12024 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12025 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12026 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12027 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12028 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
12029 #if VMA_DEDICATED_ALLOCATION 12030 if(m_UseKhrDedicatedAllocation)
12032 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12033 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12038 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12040 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12041 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12042 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12043 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers a dedicated VkDevice-
// Memory when requested/required or when the size exceeds half the preferred
// block size, otherwise suballocates from the type's block vector, falling
// back to dedicated memory if the block allocation fails.
// NOTE(review): extraction-garbled — several argument/branch lines missing.
12046 VkResult VmaAllocator_T::AllocateMemoryOfType(
12048 VkDeviceSize alignment,
12049 bool dedicatedAllocation,
12050 VkBuffer dedicatedBuffer,
12051 VkImage dedicatedImage,
12053 uint32_t memTypeIndex,
12054 VmaSuballocationType suballocType,
12057 VMA_ASSERT(pAllocation != VMA_NULL);
12058 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping requires a HOST_VISIBLE memory type.
12064 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12069 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12070 VMA_ASSERT(blockVector);
12072 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: very large requests go straight to dedicated memory.
12073 bool preferDedicatedMemory =
12074 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12075 dedicatedAllocation ||
12077 size > preferredBlockSize / 2;
// Dedicated path is only valid for the default pool.
12079 if(preferDedicatedMemory &&
12081 finalCreateInfo.
pool == VK_NULL_HANDLE)
12090 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12094 return AllocateDedicatedMemory(
// Main path: suballocate from the block vector.
12108 VkResult res = blockVector->Allocate(
12110 m_CurrentFrameIndex.load(),
12116 if(res == VK_SUCCESS)
12124 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try a dedicated allocation instead.
12128 res = AllocateDedicatedMemory(
12134 finalCreateInfo.pUserData,
12138 if(res == VK_SUCCESS)
12141 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
// Both paths failed — surface the Vulkan allocation failure.
12147 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a standalone VkDeviceMemory for one allocation. Optionally chains
// VkMemoryDedicatedAllocateInfoKHR (VK_KHR_dedicated_allocation) for the given
// buffer or image, optionally maps the memory persistently, and registers the
// result in the per-memory-type dedicated-allocation list.
12154 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12156 VmaSuballocationType suballocType,
12157 uint32_t memTypeIndex,
12159 bool isUserDataString,
12161 VkBuffer dedicatedBuffer,
12162 VkImage dedicatedImage,
12165 VMA_ASSERT(pAllocation);
12167 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12168 allocInfo.memoryTypeIndex = memTypeIndex;
12169 allocInfo.allocationSize = size;
// Chain dedicated-allocation info only when the extension is in use; at most one
// of dedicatedBuffer/dedicatedImage may be set.
#if VMA_DEDICATED_ALLOCATION 12172 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12173 if(m_UseKhrDedicatedAllocation)
12175 if(dedicatedBuffer != VK_NULL_HANDLE)
12177 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12178 dedicatedAllocInfo.buffer = dedicatedBuffer;
12179 allocInfo.pNext = &dedicatedAllocInfo;
12181 else if(dedicatedImage != VK_NULL_HANDLE)
12183 dedicatedAllocInfo.image = dedicatedImage;
12184 allocInfo.pNext = &dedicatedAllocInfo;
#endif // #if VMA_DEDICATED_ALLOCATION 12190 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12191 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12194 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
12198 void* pMappedData = VMA_NULL;
// Persistent mapping path; on failure the fresh memory is released before returning.
12201 res = (*m_VulkanFunctions.vkMapMemory)(
12210 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12211 FreeVulkanMemory(memTypeIndex, size, hMemory);
12216 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12217 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12218 (*pAllocation)->SetUserData(
this, pUserData);
12219 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12221 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted per-type dedicated-allocation vector under its mutex.
12226 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12227 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12228 VMA_ASSERT(pDedicatedAllocations);
12229 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12232 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation is in
// use, also reports whether a dedicated allocation is required/preferred via
// VkMemoryDedicatedRequirementsKHR; otherwise both flags are reported false.
12237 void VmaAllocator_T::GetBufferMemoryRequirements(
12239 VkMemoryRequirements& memReq,
12240 bool& requiresDedicatedAllocation,
12241 bool& prefersDedicatedAllocation)
const 12243 #if VMA_DEDICATED_ALLOCATION 12244 if(m_UseKhrDedicatedAllocation)
12246 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12247 memReqInfo.buffer = hBuffer;
12249 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12251 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12252 memReq2.pNext = &memDedicatedReq;
12254 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12256 memReq = memReq2.memoryRequirements;
12257 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12258 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: plain vkGetBufferMemoryRequirements, no dedicated-allocation info.
#endif // #if VMA_DEDICATED_ALLOCATION 12263 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12264 requiresDedicatedAllocation =
false;
12265 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: queries requirements via
// vkGetImageMemoryRequirements2KHR (with dedicated-requirements chain) when the
// extension is in use, otherwise via the core function with both flags false.
12269 void VmaAllocator_T::GetImageMemoryRequirements(
12271 VkMemoryRequirements& memReq,
12272 bool& requiresDedicatedAllocation,
12273 bool& prefersDedicatedAllocation)
const 12275 #if VMA_DEDICATED_ALLOCATION 12276 if(m_UseKhrDedicatedAllocation)
12278 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12279 memReqInfo.image = hImage;
12281 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12283 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12284 memReq2.pNext = &memDedicatedReq;
12286 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12288 memReq = memReq2.memoryRequirements;
12289 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12290 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path without the extension.
#endif // #if VMA_DEDICATED_ALLOCATION 12295 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12296 requiresDedicatedAllocation =
false;
12297 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates flag combinations, routes pool allocations
// to the pool's block vector, otherwise picks a memory type from
// vkMemReq.memoryTypeBits and retries with remaining types on failure.
// NOTE(review): several original lines are elided by extraction (flag tests,
// FindMemoryTypeIndex calls); text kept byte-identical.
12301 VkResult VmaAllocator_T::AllocateMemory(
12302 const VkMemoryRequirements& vkMemReq,
12303 bool requiresDedicatedAllocation,
12304 bool prefersDedicatedAllocation,
12305 VkBuffer dedicatedBuffer,
12306 VkImage dedicatedImage,
12308 VmaSuballocationType suballocType,
12311 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
12313 if(vkMemReq.size == 0)
12315 return VK_ERROR_VALIDATION_FAILED_EXT;
// Mutually-exclusive / invalid creation-flag combinations are rejected up front.
12320 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12321 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12326 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12327 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12329 if(requiresDedicatedAllocation)
12333 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12334 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12336 if(createInfo.
pool != VK_NULL_HANDLE)
12338 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12339 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12342 if((createInfo.
pool != VK_NULL_HANDLE) &&
12345 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12346 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: allocate directly from the pool's block vector, honoring the
// memory type's minimum alignment.
12349 if(createInfo.
pool != VK_NULL_HANDLE)
12351 const VkDeviceSize alignmentForPool = VMA_MAX(
12352 vkMemReq.alignment,
12353 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12354 return createInfo.
pool->m_BlockVector.Allocate(
12356 m_CurrentFrameIndex.load(),
// Non-pool path: iterate candidate memory types, masking out each failed type
// and retrying until success or no candidates remain.
12366 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12367 uint32_t memTypeIndex = UINT32_MAX;
12369 if(res == VK_SUCCESS)
12371 VkDeviceSize alignmentForMemType = VMA_MAX(
12372 vkMemReq.alignment,
12373 GetMemoryTypeMinAlignment(memTypeIndex));
12375 res = AllocateMemoryOfType(
12377 alignmentForMemType,
12378 requiresDedicatedAllocation || prefersDedicatedAllocation,
12386 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate mask and try the next one.
12396 memoryTypeBits &= ~(1u << memTypeIndex);
12399 if(res == VK_SUCCESS)
12401 alignmentForMemType = VMA_MAX(
12402 vkMemReq.alignment,
12403 GetMemoryTypeMinAlignment(memTypeIndex));
12405 res = AllocateMemoryOfType(
12407 alignmentForMemType,
12408 requiresDedicatedAllocation || prefersDedicatedAllocation,
12416 if(res == VK_SUCCESS)
12426 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation: optionally overwrites its contents with a debug pattern,
// then returns it to its owning block vector (pool or default) or frees the
// dedicated VkDeviceMemory, and finally destroys the VmaAllocation_T object.
12437 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12439 VMA_ASSERT(allocation);
// TouchAllocation guards against filling/freeing an already-lost allocation.
12441 if(TouchAllocation(allocation))
12443 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12445 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12448 switch(allocation->GetType())
12450 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12452 VmaBlockVector* pBlockVector = VMA_NULL;
12453 VmaPool hPool = allocation->GetPool();
12454 if(hPool != VK_NULL_HANDLE)
12456 pBlockVector = &hPool->m_BlockVector;
12460 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12461 pBlockVector = m_pBlockVectors[memTypeIndex];
12463 pBlockVector->Free(allocation);
12466 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12467 FreeDedicatedMemory(allocation);
// Clear user data before destruction (it may own a heap-allocated string copy).
12474 allocation->SetUserData(
this, VMA_NULL);
12475 vma_delete(
this, allocation);
// Aggregates allocator-wide statistics: initializes per-type/per-heap/total stat
// structs, accumulates default block vectors, custom pools, and dedicated
// allocations, then post-processes averages.
12478 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
12481 InitStatInfo(pStats->
total);
12482 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12484 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default (non-pool) block vectors.
12488 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12490 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12491 VMA_ASSERT(pBlockVector);
12492 pBlockVector->AddStats(pStats);
// Custom pools, under the pools mutex.
12497 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12498 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12500 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each under its own mutex.
12505 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12507 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12508 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12509 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12510 VMA_ASSERT(pDedicatedAllocVector);
12511 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12514 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12515 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12516 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12517 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Compute derived values (averages etc.) for all stat structs.
12522 VmaPostprocessCalcStatInfo(pStats->
total);
12523 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12524 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12525 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12526 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID of AMD.
12529 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments the given allocations: collects eligible block-suballocated,
// HOST_VISIBLE|HOST_COHERENT, non-lost allocations into per-block-vector
// defragmentators, runs them for default vectors and pools, then tears the
// defragmentators down. Results (bytes/allocations moved) go to
// pDefragmentationStats; per-allocation change flags to pAllocationsChanged.
12531 VkResult VmaAllocator_T::Defragment(
12533 size_t allocationCount,
12534 VkBool32* pAllocationsChanged,
12538 if(pAllocationsChanged != VMA_NULL)
12540 memset(pAllocationsChanged, 0, allocationCount *
sizeof(VkBool32));
12542 if(pDefragmentationStats != VMA_NULL)
12544 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12547 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12549 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12551 const size_t poolCount = m_Pools.size();
// Phase 1: register each eligible allocation with its block vector's defragmentator.
12554 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12557 VMA_ASSERT(hAlloc);
12558 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
12560 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12561 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12563 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12565 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12567 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12569 const VmaPool hAllocPool = hAlloc->GetPool();
12571 if(hAllocPool != VK_NULL_HANDLE)
// Only pools with the default (non-linear) algorithm can be defragmented.
12574 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12576 pAllocBlockVector = &hAllocPool->m_BlockVector;
12582 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12585 if(pAllocBlockVector != VMA_NULL)
12587 VmaDefragmentator*
const pDefragmentator =
12588 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12589 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12590 &pAllocationsChanged[allocIndex] : VMA_NULL;
12591 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: execute defragmentation, respecting optional move limits.
12596 VkResult result = VK_SUCCESS;
12600 VkDeviceSize maxBytesToMove = SIZE_MAX;
12601 uint32_t maxAllocationsToMove = UINT32_MAX;
12602 if(pDefragmentationInfo != VMA_NULL)
12609 for(uint32_t memTypeIndex = 0;
12610 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12614 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12616 result = m_pBlockVectors[memTypeIndex]->Defragment(
12617 pDefragmentationStats,
12619 maxAllocationsToMove);
12624 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12626 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12627 pDefragmentationStats,
12629 maxAllocationsToMove);
// Phase 3: destroy defragmentators (reverse order of creation).
12635 for(
size_t poolIndex = poolCount; poolIndex--; )
12637 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12641 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12643 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12645 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// NOTE(review): the function signature was elided by extraction; from the body this
// appears to be VmaAllocator_T::GetAllocationInfo(VmaAllocation, VmaAllocationInfo*)
// — confirm against the original file. For lost-capable allocations it races the
// last-use frame index forward via compare-exchange and reports zeroed handles for
// lost allocations; otherwise it copies the allocation's current parameters.
12654 if(hAllocation->CanBecomeLost())
12660 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12661 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report size/userdata but no live memory handle/offset.
12664 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12668 pAllocationInfo->
offset = 0;
12669 pAllocationInfo->
size = hAllocation->GetSize();
12671 pAllocationInfo->
pUserData = hAllocation->GetUserData();
12674 else if(localLastUseFrameIndex == localCurrFrameIndex)
12676 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12677 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12678 pAllocationInfo->
offset = hAllocation->GetOffset();
12679 pAllocationInfo->
size = hAllocation->GetSize();
12681 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS loop: advance last-use frame to the current frame, retrying on contention.
12686 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12688 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: stats builds still touch the frame index (debug bookkeeping).
#if VMA_STATS_STRING_ENABLED 12696 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12697 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12700 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12701 if(localLastUseFrameIndex == localCurrFrameIndex)
12707 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12709 localLastUseFrameIndex = localCurrFrameIndex;
12715 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12716 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12717 pAllocationInfo->
offset = hAllocation->GetOffset();
12718 pAllocationInfo->
size = hAllocation->GetSize();
12719 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12720 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns false if the
// allocation has become lost; true otherwise. Uses the same CAS-on-frame-index
// protocol as GetAllocationInfo, without copying any parameters out.
12724 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12727 if(hAllocation->CanBecomeLost())
12729 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12730 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12733 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12737 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: publish the current frame as the last-use frame.
12743 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12745 localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED 12753 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12754 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12757 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12758 if(localLastUseFrameIndex == localCurrFrameIndex)
12764 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12766 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): the function signature was elided by extraction; this appears to
// be VmaAllocator_T::CreatePool(const VmaPoolCreateInfo*, VmaPool*) — confirm.
// Creates a custom pool, pre-creates its minimum blocks, assigns a unique id,
// and registers it in the sorted m_Pools vector.
12778 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
12788 return VK_ERROR_INITIALIZATION_FAILED;
12791 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12793 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
12795 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12796 if(res != VK_SUCCESS)
// Roll back pool creation if the minimum block set could not be allocated.
12798 vma_delete(
this, *pPool);
12805 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12806 (*pPool)->SetId(m_NextPoolId++);
12807 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters the pool from m_Pools (under the pools mutex) and destroys it.
12813 void VmaAllocator_T::DestroyPool(
VmaPool pool)
12817 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12818 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
12819 VMA_ASSERT(success &&
"Pool not found in Allocator.");
12822 vma_delete(
this, pool);
// NOTE(review): signature elided by extraction — this is the one-line body of what
// appears to be VmaAllocator_T::GetPoolStats(VmaPool, VmaPoolStats*); confirm.
12827 pool->m_BlockVector.GetPoolStats(pPoolStats);
12830 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
12832 m_CurrentFrameIndex.store(frameIndex);
12835 void VmaAllocator_T::MakePoolAllocationsLost(
12837 size_t* pLostAllocationCount)
12839 hPool->m_BlockVector.MakePoolAllocationsLost(
12840 m_CurrentFrameIndex.load(),
12841 pLostAllocationCount);
12844 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
12846 return hPool->m_BlockVector.CheckCorruption();
// Checks debug-margin corruption across all default block vectors and custom pools
// whose memory type is included in memoryTypeBits. Returns VK_SUCCESS if at least
// one vector could be checked; VK_ERROR_FEATURE_NOT_PRESENT if none supported it.
12849 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
12851 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default (non-pool) block vectors.
12854 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12856 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
12858 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12859 VMA_ASSERT(pBlockVector);
12860 VkResult localRes = pBlockVector->CheckCorruption();
12863 case VK_ERROR_FEATURE_NOT_PRESENT:
12866 finalRes = VK_SUCCESS;
// Custom pools, under the pools mutex.
12876 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12877 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12879 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
12881 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
12884 case VK_ERROR_FEATURE_NOT_PRESENT:
12887 finalRes = VK_SUCCESS;
12899 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
12901 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
12902 (*pAllocation)->InitLost();
// Calls vkAllocateMemory, enforcing the user-configured per-heap size limit
// (m_HeapSizeLimit) when one is set, and invokes the pfnAllocate device-memory
// callback on success.
12905 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
12907 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: reserve budget under the mutex before allocating.
12910 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12912 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12913 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
12915 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12916 if(res == VK_SUCCESS)
12918 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: report device OOM without calling the driver.
12923 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
12928 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12931 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
12933 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Calls vkFreeMemory, invoking the pfnFree device-memory callback first and
// returning the freed size to the per-heap budget when a heap size limit is set.
12939 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
12941 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
12943 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
12946 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Mirror of the budget reservation in AllocateVulkanMemory.
12948 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
12949 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12951 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12952 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host address space. Block suballocations map the whole
// block (reference-counted) and offset the returned pointer; dedicated allocations
// map their own VkDeviceMemory. Lost-capable allocations cannot be mapped.
12956 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
12958 if(hAllocation->CanBecomeLost())
12960 return VK_ERROR_MEMORY_MAP_FAILED;
12963 switch(hAllocation->GetType())
12965 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12967 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12968 char *pBytes = VMA_NULL;
12969 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
12970 if(res == VK_SUCCESS)
// Returned pointer = block base + this allocation's offset within the block.
12972 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
12973 hAllocation->BlockAllocMap();
12977 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12978 return hAllocation->DedicatedAllocMap(
this, ppData);
12981 return VK_ERROR_MEMORY_MAP_FAILED;
// Unmap counterpart (signature elided by extraction; appears to be
// VmaAllocator_T::Unmap(VmaAllocation) — confirm): decrements the map reference
// count on the owning block or dedicated memory.
12987 switch(hAllocation->GetType())
12989 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12991 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12992 hAllocation->BlockAllocUnmap();
12993 pBlock->Unmap(
this, 1);
12996 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12997 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory: directly via vkBindBufferMemory for
// dedicated allocations, or through the owning block (which serializes binds on
// shared VkDeviceMemory) for block suballocations.
13004 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13006 VkResult res = VK_SUCCESS;
13007 switch(hAllocation->GetType())
13009 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13010 res = GetVulkanFunctions().vkBindBufferMemory(
13013 hAllocation->GetMemory(),
13016 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13018 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13019 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13020 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: vkBindImageMemory for dedicated
// allocations, or bind through the owning block for block suballocations.
13029 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13031 VkResult res = VK_SUCCESS;
13032 switch(hAllocation->GetType())
13034 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13035 res = GetVulkanFunctions().vkBindImageMemory(
13038 hAllocation->GetMemory(),
13041 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13043 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13044 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13045 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of the allocation when its memory type is
// non-coherent. The range is expanded to nonCoherentAtomSize boundaries as the
// Vulkan spec requires, and clamped to the allocation/block size.
13054 void VmaAllocator_T::FlushOrInvalidateAllocation(
13056 VkDeviceSize offset, VkDeviceSize size,
13057 VMA_CACHE_OPERATION op)
13059 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; skip entirely.
13060 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13062 const VkDeviceSize allocationSize = hAllocation->GetSize();
13063 VMA_ASSERT(offset <= allocationSize);
13065 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13067 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13068 memRange.memory = hAllocation->GetMemory();
13070 switch(hAllocation->GetType())
13072 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: offsets are relative to the VkDeviceMemory itself.
13073 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13074 if(size == VK_WHOLE_SIZE)
13076 memRange.size = allocationSize - memRange.offset;
13080 VMA_ASSERT(offset + size <= allocationSize);
13081 memRange.size = VMA_MIN(
13082 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13083 allocationSize - memRange.offset);
13087 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block suballocation: compute the range within the allocation first...
13090 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13091 if(size == VK_WHOLE_SIZE)
13093 size = allocationSize - offset;
13097 VMA_ASSERT(offset + size <= allocationSize);
13099 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then translate by the allocation's offset in the block and clamp to the block.
13102 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13103 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13104 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13105 memRange.offset += allocationOffset;
13106 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13117 case VMA_CACHE_FLUSH:
13118 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13120 case VMA_CACHE_INVALIDATE:
13121 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Removes a dedicated allocation from its per-type registry (under the mutex)
// and frees its VkDeviceMemory.
13130 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13132 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13134 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13136 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13137 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13138 VMA_ASSERT(pDedicatedAllocations);
13139 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13140 VMA_ASSERT(success);
13143 VkDeviceMemory hMemory = allocation->GetMemory();
13155 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13157 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with a byte pattern (created /
// destroyed markers). Only applies to host-visible, non-lost-capable allocations,
// and flushes the write for non-coherent memory.
13160 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13162 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13163 !hAllocation->CanBecomeLost() &&
13164 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13166 void* pData = VMA_NULL;
13167 VkResult res = Map(hAllocation, &pData);
13168 if(res == VK_SUCCESS)
13170 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
13171 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13172 Unmap(hAllocation);
13176 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the allocator's detailed memory map as JSON: dedicated allocations per
// memory type, default block vectors ("DefaultPools"), and custom pools ("Pools").
// Section headers are emitted lazily, only when a section has content.
#if VMA_STATS_STRING_ENABLED 13183 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13185 bool dedicatedAllocationsStarted =
false;
13186 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13188 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13189 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13190 VMA_ASSERT(pDedicatedAllocVector);
13191 if(pDedicatedAllocVector->empty() ==
false)
13193 if(dedicatedAllocationsStarted ==
false)
13195 dedicatedAllocationsStarted =
true;
13196 json.WriteString(
"DedicatedAllocations");
13197 json.BeginObject();
13200 json.BeginString(
"Type ");
13201 json.ContinueString(memTypeIndex);
13206 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13208 json.BeginObject(
true);
13210 hAlloc->PrintParameters(json);
13217 if(dedicatedAllocationsStarted)
// Default (non-pool) block vectors, keyed by memory type.
13223 bool allocationsStarted =
false;
13224 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13226 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13228 if(allocationsStarted ==
false)
13230 allocationsStarted =
true;
13231 json.WriteString(
"DefaultPools");
13232 json.BeginObject();
13235 json.BeginString(
"Type ");
13236 json.ContinueString(memTypeIndex);
13239 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13242 if(allocationsStarted)
// Custom pools, keyed by pool id, under the pools mutex.
13250 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13251 const size_t poolCount = m_Pools.size();
13254 json.WriteString(
"Pools");
13255 json.BeginObject();
13256 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13258 json.BeginString();
13259 json.ContinueString(m_Pools[poolIndex]->GetId());
13262 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public C API entry points (signatures elided by extraction): vmaCreateAllocator
// constructs the VmaAllocator_T and runs Init; vmaDestroyAllocator deletes it,
// copying the allocation callbacks out first since vma_delete needs them after
// the allocator object is gone.
#endif // #if VMA_STATS_STRING_ENABLED 13278 VMA_ASSERT(pCreateInfo && pAllocator);
13279 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13281 return (*pAllocator)->Init(pCreateInfo);
13287 if(allocator != VK_NULL_HANDLE)
13289 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13290 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13291 vma_delete(&allocationCallbacks, allocator);
// Simple public accessors (leading signature lines elided by extraction):
// vmaGetPhysicalDeviceProperties, vmaGetMemoryProperties,
// vmaGetMemoryTypeProperties, vmaSetCurrentFrameIndex, vmaCalculateStats.
// Each forwards to the cached properties / allocator method after asserting args.
13297 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13299 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13300 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13305 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13307 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13308 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13313 uint32_t memoryTypeIndex,
13314 VkMemoryPropertyFlags* pFlags)
13316 VMA_ASSERT(allocator && pFlags);
13317 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13318 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13323 uint32_t frameIndex)
13325 VMA_ASSERT(allocator);
// VMA_FRAME_INDEX_LOST is reserved as the "lost" sentinel and may not be used.
13326 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13328 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13330 allocator->SetCurrentFrameIndex(frameIndex);
13337 VMA_ASSERT(allocator && pStats);
13338 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13339 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics to a heap-allocated JSON
// string (per-heap and per-type flags and stats; full detailed map when
// detailedMap == VK_TRUE). The caller must release the string with
// vmaFreeStatsString, which frees exactly len+1 bytes.
#if VMA_STATS_STRING_ENABLED 13346 char** ppStatsString,
13347 VkBool32 detailedMap)
13349 VMA_ASSERT(allocator && ppStatsString);
13350 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13352 VmaStringBuilder sb(allocator);
13354 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13355 json.BeginObject();
13358 allocator->CalculateStats(&stats);
13360 json.WriteString(
"Total");
13361 VmaPrintStatInfo(json, stats.
total);
// Per-heap section: size, heap flags, stats, and nested per-type entries.
13363 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13365 json.BeginString(
"Heap ");
13366 json.ContinueString(heapIndex);
13368 json.BeginObject();
13370 json.WriteString(
"Size");
13371 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13373 json.WriteString(
"Flags");
13374 json.BeginArray(
true);
13375 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13377 json.WriteString(
"DEVICE_LOCAL");
13383 json.WriteString(
"Stats");
13384 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap.
13387 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13389 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13391 json.BeginString(
"Type ");
13392 json.ContinueString(typeIndex);
13395 json.BeginObject();
13397 json.WriteString(
"Flags");
13398 json.BeginArray(
true);
13399 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13400 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13402 json.WriteString(
"DEVICE_LOCAL");
13404 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13406 json.WriteString(
"HOST_VISIBLE");
13408 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13410 json.WriteString(
"HOST_COHERENT");
13412 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13414 json.WriteString(
"HOST_CACHED");
13416 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13418 json.WriteString(
"LAZILY_ALLOCATED");
13424 json.WriteString(
"Stats");
13425 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13434 if(detailedMap == VK_TRUE)
13436 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a NUL-terminated string owned by the caller.
13442 const size_t len = sb.GetLength();
13443 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13446 memcpy(pChars, sb.GetData(), len);
13448 pChars[len] =
'\0';
13449 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string produced by vmaBuildStatsString.
13454 char* pStatsString)
13456 if(pStatsString != VMA_NULL)
13458 VMA_ASSERT(allocator);
13459 size_t len = strlen(pStatsString);
13460 vma_delete_array(allocator, pStatsString, len + 1);
13464 #endif // #if VMA_STATS_STRING_ENABLED 13471 uint32_t memoryTypeBits,
13473 uint32_t* pMemoryTypeIndex)
13475 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13476 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13477 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13484 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13485 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13490 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13494 switch(pAllocationCreateInfo->
usage)
13499 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13501 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13505 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13508 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13509 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13511 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13515 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13516 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13522 *pMemoryTypeIndex = UINT32_MAX;
13523 uint32_t minCost = UINT32_MAX;
13524 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13525 memTypeIndex < allocator->GetMemoryTypeCount();
13526 ++memTypeIndex, memTypeBit <<= 1)
13529 if((memTypeBit & memoryTypeBits) != 0)
13531 const VkMemoryPropertyFlags currFlags =
13532 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13534 if((requiredFlags & ~currFlags) == 0)
13537 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13539 if(currCost < minCost)
13541 *pMemoryTypeIndex = memTypeIndex;
13546 minCost = currCost;
13551 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13556 const VkBufferCreateInfo* pBufferCreateInfo,
13558 uint32_t* pMemoryTypeIndex)
13560 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13561 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13562 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13563 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13565 const VkDevice hDev = allocator->m_hDevice;
13566 VkBuffer hBuffer = VK_NULL_HANDLE;
13567 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13568 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13569 if(res == VK_SUCCESS)
13571 VkMemoryRequirements memReq = {};
13572 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13573 hDev, hBuffer, &memReq);
13577 memReq.memoryTypeBits,
13578 pAllocationCreateInfo,
13581 allocator->GetVulkanFunctions().vkDestroyBuffer(
13582 hDev, hBuffer, allocator->GetAllocationCallbacks());
13589 const VkImageCreateInfo* pImageCreateInfo,
13591 uint32_t* pMemoryTypeIndex)
13593 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13594 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13595 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13596 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13598 const VkDevice hDev = allocator->m_hDevice;
13599 VkImage hImage = VK_NULL_HANDLE;
13600 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13601 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13602 if(res == VK_SUCCESS)
13604 VkMemoryRequirements memReq = {};
13605 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13606 hDev, hImage, &memReq);
13610 memReq.memoryTypeBits,
13611 pAllocationCreateInfo,
13614 allocator->GetVulkanFunctions().vkDestroyImage(
13615 hDev, hImage, allocator->GetAllocationCallbacks());
13625 VMA_ASSERT(allocator && pCreateInfo && pPool);
13627 VMA_DEBUG_LOG(
"vmaCreatePool");
13629 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13631 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13633 #if VMA_RECORDING_ENABLED 13634 if(allocator->GetRecorder() != VMA_NULL)
13636 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13647 VMA_ASSERT(allocator);
13649 if(pool == VK_NULL_HANDLE)
13654 VMA_DEBUG_LOG(
"vmaDestroyPool");
13656 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13658 #if VMA_RECORDING_ENABLED 13659 if(allocator->GetRecorder() != VMA_NULL)
13661 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13665 allocator->DestroyPool(pool);
13673 VMA_ASSERT(allocator && pool && pPoolStats);
13675 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13677 allocator->GetPoolStats(pool, pPoolStats);
13683 size_t* pLostAllocationCount)
13685 VMA_ASSERT(allocator && pool);
13687 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13689 #if VMA_RECORDING_ENABLED 13690 if(allocator->GetRecorder() != VMA_NULL)
13692 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13696 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13701 VMA_ASSERT(allocator && pool);
13703 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13705 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13707 return allocator->CheckPoolCorruption(pool);
13712 const VkMemoryRequirements* pVkMemoryRequirements,
13717 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13719 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13721 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13723 VkResult result = allocator->AllocateMemory(
13724 *pVkMemoryRequirements,
13730 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13733 #if VMA_RECORDING_ENABLED 13734 if(allocator->GetRecorder() != VMA_NULL)
13736 allocator->GetRecorder()->RecordAllocateMemory(
13737 allocator->GetCurrentFrameIndex(),
13738 *pVkMemoryRequirements,
13744 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13746 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13759 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13761 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13763 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13765 VkMemoryRequirements vkMemReq = {};
13766 bool requiresDedicatedAllocation =
false;
13767 bool prefersDedicatedAllocation =
false;
13768 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
13769 requiresDedicatedAllocation,
13770 prefersDedicatedAllocation);
13772 VkResult result = allocator->AllocateMemory(
13774 requiresDedicatedAllocation,
13775 prefersDedicatedAllocation,
13779 VMA_SUBALLOCATION_TYPE_BUFFER,
13782 #if VMA_RECORDING_ENABLED 13783 if(allocator->GetRecorder() != VMA_NULL)
13785 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
13786 allocator->GetCurrentFrameIndex(),
13788 requiresDedicatedAllocation,
13789 prefersDedicatedAllocation,
13795 if(pAllocationInfo && result == VK_SUCCESS)
13797 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13810 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13812 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
13814 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13816 VkMemoryRequirements vkMemReq = {};
13817 bool requiresDedicatedAllocation =
false;
13818 bool prefersDedicatedAllocation =
false;
13819 allocator->GetImageMemoryRequirements(image, vkMemReq,
13820 requiresDedicatedAllocation, prefersDedicatedAllocation);
13822 VkResult result = allocator->AllocateMemory(
13824 requiresDedicatedAllocation,
13825 prefersDedicatedAllocation,
13829 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
13832 #if VMA_RECORDING_ENABLED 13833 if(allocator->GetRecorder() != VMA_NULL)
13835 allocator->GetRecorder()->RecordAllocateMemoryForImage(
13836 allocator->GetCurrentFrameIndex(),
13838 requiresDedicatedAllocation,
13839 prefersDedicatedAllocation,
13845 if(pAllocationInfo && result == VK_SUCCESS)
13847 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13857 VMA_ASSERT(allocator);
13859 if(allocation == VK_NULL_HANDLE)
13864 VMA_DEBUG_LOG(
"vmaFreeMemory");
13866 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13868 #if VMA_RECORDING_ENABLED 13869 if(allocator->GetRecorder() != VMA_NULL)
13871 allocator->GetRecorder()->RecordFreeMemory(
13872 allocator->GetCurrentFrameIndex(),
13877 allocator->FreeMemory(allocation);
13885 VMA_ASSERT(allocator && allocation && pAllocationInfo);
13887 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13889 #if VMA_RECORDING_ENABLED 13890 if(allocator->GetRecorder() != VMA_NULL)
13892 allocator->GetRecorder()->RecordGetAllocationInfo(
13893 allocator->GetCurrentFrameIndex(),
13898 allocator->GetAllocationInfo(allocation, pAllocationInfo);
13905 VMA_ASSERT(allocator && allocation);
13907 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13909 #if VMA_RECORDING_ENABLED 13910 if(allocator->GetRecorder() != VMA_NULL)
13912 allocator->GetRecorder()->RecordTouchAllocation(
13913 allocator->GetCurrentFrameIndex(),
13918 return allocator->TouchAllocation(allocation);
13926 VMA_ASSERT(allocator && allocation);
13928 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13930 allocation->SetUserData(allocator, pUserData);
13932 #if VMA_RECORDING_ENABLED 13933 if(allocator->GetRecorder() != VMA_NULL)
13935 allocator->GetRecorder()->RecordSetAllocationUserData(
13936 allocator->GetCurrentFrameIndex(),
13947 VMA_ASSERT(allocator && pAllocation);
13949 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
13951 allocator->CreateLostAllocation(pAllocation);
13953 #if VMA_RECORDING_ENABLED 13954 if(allocator->GetRecorder() != VMA_NULL)
13956 allocator->GetRecorder()->RecordCreateLostAllocation(
13957 allocator->GetCurrentFrameIndex(),
13968 VMA_ASSERT(allocator && allocation && ppData);
13970 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13972 VkResult res = allocator->Map(allocation, ppData);
13974 #if VMA_RECORDING_ENABLED 13975 if(allocator->GetRecorder() != VMA_NULL)
13977 allocator->GetRecorder()->RecordMapMemory(
13978 allocator->GetCurrentFrameIndex(),
13990 VMA_ASSERT(allocator && allocation);
13992 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13994 #if VMA_RECORDING_ENABLED 13995 if(allocator->GetRecorder() != VMA_NULL)
13997 allocator->GetRecorder()->RecordUnmapMemory(
13998 allocator->GetCurrentFrameIndex(),
14003 allocator->Unmap(allocation);
14008 VMA_ASSERT(allocator && allocation);
14010 VMA_DEBUG_LOG(
"vmaFlushAllocation");
14012 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14014 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14016 #if VMA_RECORDING_ENABLED 14017 if(allocator->GetRecorder() != VMA_NULL)
14019 allocator->GetRecorder()->RecordFlushAllocation(
14020 allocator->GetCurrentFrameIndex(),
14021 allocation, offset, size);
14028 VMA_ASSERT(allocator && allocation);
14030 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14032 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14034 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14036 #if VMA_RECORDING_ENABLED 14037 if(allocator->GetRecorder() != VMA_NULL)
14039 allocator->GetRecorder()->RecordInvalidateAllocation(
14040 allocator->GetCurrentFrameIndex(),
14041 allocation, offset, size);
14048 VMA_ASSERT(allocator);
14050 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14052 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14054 return allocator->CheckCorruption(memoryTypeBits);
14060 size_t allocationCount,
14061 VkBool32* pAllocationsChanged,
14065 VMA_ASSERT(allocator && pAllocations);
14067 VMA_DEBUG_LOG(
"vmaDefragment");
14069 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14071 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14079 VMA_ASSERT(allocator && allocation && buffer);
14081 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14083 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14085 return allocator->BindBufferMemory(allocation, buffer);
14093 VMA_ASSERT(allocator && allocation && image);
14095 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14097 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14099 return allocator->BindImageMemory(allocation, image);
14104 const VkBufferCreateInfo* pBufferCreateInfo,
14110 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14112 if(pBufferCreateInfo->size == 0)
14114 return VK_ERROR_VALIDATION_FAILED_EXT;
14117 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14119 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14121 *pBuffer = VK_NULL_HANDLE;
14122 *pAllocation = VK_NULL_HANDLE;
14125 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14126 allocator->m_hDevice,
14128 allocator->GetAllocationCallbacks(),
14133 VkMemoryRequirements vkMemReq = {};
14134 bool requiresDedicatedAllocation =
false;
14135 bool prefersDedicatedAllocation =
false;
14136 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14137 requiresDedicatedAllocation, prefersDedicatedAllocation);
14141 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14143 VMA_ASSERT(vkMemReq.alignment %
14144 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14146 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14148 VMA_ASSERT(vkMemReq.alignment %
14149 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14151 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14153 VMA_ASSERT(vkMemReq.alignment %
14154 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14158 res = allocator->AllocateMemory(
14160 requiresDedicatedAllocation,
14161 prefersDedicatedAllocation,
14164 *pAllocationCreateInfo,
14165 VMA_SUBALLOCATION_TYPE_BUFFER,
14168 #if VMA_RECORDING_ENABLED 14169 if(allocator->GetRecorder() != VMA_NULL)
14171 allocator->GetRecorder()->RecordCreateBuffer(
14172 allocator->GetCurrentFrameIndex(),
14173 *pBufferCreateInfo,
14174 *pAllocationCreateInfo,
14182 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14186 #if VMA_STATS_STRING_ENABLED 14187 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14189 if(pAllocationInfo != VMA_NULL)
14191 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14196 allocator->FreeMemory(*pAllocation);
14197 *pAllocation = VK_NULL_HANDLE;
14198 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14199 *pBuffer = VK_NULL_HANDLE;
14202 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14203 *pBuffer = VK_NULL_HANDLE;
14214 VMA_ASSERT(allocator);
14216 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14221 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14223 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14225 #if VMA_RECORDING_ENABLED 14226 if(allocator->GetRecorder() != VMA_NULL)
14228 allocator->GetRecorder()->RecordDestroyBuffer(
14229 allocator->GetCurrentFrameIndex(),
14234 if(buffer != VK_NULL_HANDLE)
14236 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14239 if(allocation != VK_NULL_HANDLE)
14241 allocator->FreeMemory(allocation);
14247 const VkImageCreateInfo* pImageCreateInfo,
14253 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14255 if(pImageCreateInfo->extent.width == 0 ||
14256 pImageCreateInfo->extent.height == 0 ||
14257 pImageCreateInfo->extent.depth == 0 ||
14258 pImageCreateInfo->mipLevels == 0 ||
14259 pImageCreateInfo->arrayLayers == 0)
14261 return VK_ERROR_VALIDATION_FAILED_EXT;
14264 VMA_DEBUG_LOG(
"vmaCreateImage");
14266 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14268 *pImage = VK_NULL_HANDLE;
14269 *pAllocation = VK_NULL_HANDLE;
14272 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14273 allocator->m_hDevice,
14275 allocator->GetAllocationCallbacks(),
14279 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14280 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14281 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14284 VkMemoryRequirements vkMemReq = {};
14285 bool requiresDedicatedAllocation =
false;
14286 bool prefersDedicatedAllocation =
false;
14287 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14288 requiresDedicatedAllocation, prefersDedicatedAllocation);
14290 res = allocator->AllocateMemory(
14292 requiresDedicatedAllocation,
14293 prefersDedicatedAllocation,
14296 *pAllocationCreateInfo,
14300 #if VMA_RECORDING_ENABLED 14301 if(allocator->GetRecorder() != VMA_NULL)
14303 allocator->GetRecorder()->RecordCreateImage(
14304 allocator->GetCurrentFrameIndex(),
14306 *pAllocationCreateInfo,
14314 res = allocator->BindImageMemory(*pAllocation, *pImage);
14318 #if VMA_STATS_STRING_ENABLED 14319 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14321 if(pAllocationInfo != VMA_NULL)
14323 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14328 allocator->FreeMemory(*pAllocation);
14329 *pAllocation = VK_NULL_HANDLE;
14330 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14331 *pImage = VK_NULL_HANDLE;
14334 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14335 *pImage = VK_NULL_HANDLE;
14346 VMA_ASSERT(allocator);
14348 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14353 VMA_DEBUG_LOG(
"vmaDestroyImage");
14355 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14357 #if VMA_RECORDING_ENABLED 14358 if(allocator->GetRecorder() != VMA_NULL)
14360 allocator->GetRecorder()->RecordDestroyImage(
14361 allocator->GetCurrentFrameIndex(),
14366 if(image != VK_NULL_HANDLE)
14368 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14370 if(allocation != VK_NULL_HANDLE)
14372 allocator->FreeMemory(allocation);
14376 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1584
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1885
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1641
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1615
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2207
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1596
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1842
Definition: vk_mem_alloc.h:1945
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1588
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2307
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1638
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2552
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2096
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1485
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2188
Definition: vk_mem_alloc.h:1922
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1577
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1995
Definition: vk_mem_alloc.h:1869
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1650
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2124
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1703
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1635
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1873
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1775
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1593
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1774
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2556
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1667
VmaStatInfo total
Definition: vk_mem_alloc.h:1784
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2564
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1979
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2547
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1594
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1519
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1644
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2138
Definition: vk_mem_alloc.h:2132
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1710
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2317
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1589
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1613
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2016
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2158
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2194
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1575
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2141
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1820
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2542
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2560
Definition: vk_mem_alloc.h:1859
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2003
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1592
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1780
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1525
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:1963
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1546
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1617
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1551
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2562
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1990
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2204
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1585
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1763
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2153
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1538
Definition: vk_mem_alloc.h:2128
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1929
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1776
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1542
Definition: vk_mem_alloc.h:1953
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2144
Definition: vk_mem_alloc.h:1868
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1591
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1985
Definition: vk_mem_alloc.h:1976
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1766
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1587
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2166
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1653
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2197
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1974
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2009
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1691
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1782
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1909
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1775
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1598
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1623
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1540
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1597
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2180
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1590
Definition: vk_mem_alloc.h:1940
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1631
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2331
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1647
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1775
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1772
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes the parameters of an existing VmaPool.
Definition: vk_mem_alloc.h:2185
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:1949
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2312
Definition: vk_mem_alloc.h:1960
Definition: vk_mem_alloc.h:1972
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2558
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1583
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1770
Definition: vk_mem_alloc.h:1825
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2134
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1620
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1768
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1595
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1599
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1896
Definition: vk_mem_alloc.h:1967
Definition: vk_mem_alloc.h:1852
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2326
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1573
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1586
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2113
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2293
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1957
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2078
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1776
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1935
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1607
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1783
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2191
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1776
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2298