23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1483 #ifndef VMA_RECORDING_ENABLED 1485 #define VMA_RECORDING_ENABLED 1 1487 #define VMA_RECORDING_ENABLED 0 1492 #define NOMINMAX // For windows.h 1495 #include <vulkan/vulkan.h> 1497 #if VMA_RECORDING_ENABLED 1498 #include <windows.h> 1501 #if !defined(VMA_DEDICATED_ALLOCATION) 1502 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1503 #define VMA_DEDICATED_ALLOCATION 1 1505 #define VMA_DEDICATED_ALLOCATION 0 1523 uint32_t memoryType,
1524 VkDeviceMemory memory,
1529 uint32_t memoryType,
1530 VkDeviceMemory memory,
1602 #if VMA_DEDICATED_ALLOCATION 1603 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1604 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1730 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1738 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1748 uint32_t memoryTypeIndex,
1749 VkMemoryPropertyFlags* pFlags);
1761 uint32_t frameIndex);
1794 #define VMA_STATS_STRING_ENABLED 1 1796 #if VMA_STATS_STRING_ENABLED 1803 char** ppStatsString,
1804 VkBool32 detailedMap);
1808 char* pStatsString);
1810 #endif // #if VMA_STATS_STRING_ENABLED 2042 uint32_t memoryTypeBits,
2044 uint32_t* pMemoryTypeIndex);
2060 const VkBufferCreateInfo* pBufferCreateInfo,
2062 uint32_t* pMemoryTypeIndex);
2078 const VkImageCreateInfo* pImageCreateInfo,
2080 uint32_t* pMemoryTypeIndex);
2252 size_t* pLostAllocationCount);
2351 const VkMemoryRequirements* pVkMemoryRequirements,
2405 VkDeviceSize newSize);
2638 size_t allocationCount,
2639 VkBool32* pAllocationsChanged,
2705 const VkBufferCreateInfo* pBufferCreateInfo,
2730 const VkImageCreateInfo* pImageCreateInfo,
2756 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2759 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2760 #define VMA_IMPLEMENTATION 2763 #ifdef VMA_IMPLEMENTATION 2764 #undef VMA_IMPLEMENTATION 2786 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2787 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2799 #if VMA_USE_STL_CONTAINERS 2800 #define VMA_USE_STL_VECTOR 1 2801 #define VMA_USE_STL_UNORDERED_MAP 1 2802 #define VMA_USE_STL_LIST 1 2805 #if VMA_USE_STL_VECTOR 2809 #if VMA_USE_STL_UNORDERED_MAP 2810 #include <unordered_map> 2813 #if VMA_USE_STL_LIST 2822 #include <algorithm> 2828 #define VMA_NULL nullptr 2831 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 2833 void *aligned_alloc(
size_t alignment,
size_t size)
2836 if(alignment <
sizeof(
void*))
2838 alignment =
sizeof(
void*);
2841 return memalign(alignment, size);
2843 #elif defined(__APPLE__) || defined(__ANDROID__) 2845 void *aligned_alloc(
size_t alignment,
size_t size)
2848 if(alignment <
sizeof(
void*))
2850 alignment =
sizeof(
void*);
2854 if(posix_memalign(&pointer, alignment, size) == 0)
2868 #define VMA_ASSERT(expr) assert(expr) 2870 #define VMA_ASSERT(expr) 2876 #ifndef VMA_HEAVY_ASSERT 2878 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2880 #define VMA_HEAVY_ASSERT(expr) 2884 #ifndef VMA_ALIGN_OF 2885 #define VMA_ALIGN_OF(type) (__alignof(type)) 2888 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2890 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2892 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2896 #ifndef VMA_SYSTEM_FREE 2898 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2900 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2905 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2909 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2913 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2917 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2920 #ifndef VMA_DEBUG_LOG 2921 #define VMA_DEBUG_LOG(format, ...) 2931 #if VMA_STATS_STRING_ENABLED 2932 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2934 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats num as decimal text into outStr, writing at most strLen bytes
// (including the null terminator).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long widened = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", widened);
}
// Formats a pointer value as text into outStr, writing at most strLen bytes
// (including the null terminator). Exact "%p" output is platform-defined.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2952 void Lock() { m_Mutex.lock(); }
2953 void Unlock() { m_Mutex.unlock(); }
2957 #define VMA_MUTEX VmaMutex 2968 #ifndef VMA_ATOMIC_UINT32 2969 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2972 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2977 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2980 #ifndef VMA_DEBUG_ALIGNMENT 2985 #define VMA_DEBUG_ALIGNMENT (1) 2988 #ifndef VMA_DEBUG_MARGIN 2993 #define VMA_DEBUG_MARGIN (0) 2996 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3001 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3004 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3010 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3013 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3018 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3021 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3026 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3029 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3030 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3034 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3035 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3039 #ifndef VMA_CLASS_NO_COPY 3040 #define VMA_CLASS_NO_COPY(className) \ 3042 className(const className&) = delete; \ 3043 className& operator=(const className&) = delete; 3046 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3049 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3051 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3052 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3058 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3059 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic parallel bit-summing method. The final `return c;` was missing
// in the mangled source and is restored here.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns val up to the nearest multiple of align. Values already aligned are
// returned unchanged. Uses divide-then-multiply, so it also works for align
// values that are not powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return (bumped / align) * align;
}
// Aligns val down to the nearest multiple of align (truncating division).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T units = val / align;
    return units * align;
}
// Integer division with mathematical rounding to the nearest whole number
// (adds half the divisor before dividing).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true when x is a power of two (x & (x-1) clears the lowest set bit,
// leaving zero only for powers of two). Note: x == 0 also yields true,
// matching the original behavior.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T mask = x - 1;
    return (x & mask) == 0;
}
// Returns the smallest power of two greater than or equal to v.
// The bit-smearing body was lost in the mangled source and is restored:
// propagate the highest set bit into all lower positions, then add one.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// 64-bit overload: smallest power of two greater than or equal to v.
// Body restored (lost in mangling); identical smearing with one extra step
// for the upper 32 bits.
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns the largest power of two less than or equal to v (v > 0).
// Body restored (lost in mangling): smear the top bit downward, then keep
// only the highest bit via v ^ (v >> 1).
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
// 64-bit overload: largest power of two less than or equal to v (v > 0).
// Body restored (lost in mangling).
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
3153 static inline bool VmaStrIsEmpty(
const char* pStr)
3155 return pStr == VMA_NULL || *pStr ==
'\0';
3158 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition used by VmaQuickSort: the element *(end-1) is the
// pivot; all elements for which cmp(elem, pivot) holds are moved before it.
// Returns the iterator of the pivot's final position.
// The `++insertIndex;` step and `return insertIndex;` were missing in the
// mangled source and are restored here.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
3199 template<
typename Iterator,
typename Compare>
3200 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3204 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3205 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3206 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3210 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3212 #endif // #ifndef VMA_SORT 3221 static inline bool VmaBlocksOnSamePage(
3222 VkDeviceSize resourceAOffset,
3223 VkDeviceSize resourceASize,
3224 VkDeviceSize resourceBOffset,
3225 VkDeviceSize pageSize)
3227 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3228 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3229 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3230 VkDeviceSize resourceBStart = resourceBOffset;
3231 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3232 return resourceAEndPage == resourceBStartPage;
// Kind of resource a suballocation within a memory block is bound to.
// Used to decide when two neighbors conflict under bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource kind not known
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3252 static inline bool VmaIsBufferImageGranularityConflict(
3253 VmaSuballocationType suballocType1,
3254 VmaSuballocationType suballocType2)
3256 if(suballocType1 > suballocType2)
3258 VMA_SWAP(suballocType1, suballocType2);
3261 switch(suballocType1)
3263 case VMA_SUBALLOCATION_TYPE_FREE:
3265 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3267 case VMA_SUBALLOCATION_TYPE_BUFFER:
3269 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3270 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3271 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3273 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3274 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3275 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3276 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3278 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3279 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3287 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3289 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3290 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3291 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3293 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3297 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3299 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3300 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3301 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3303 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3314 VMA_CLASS_NO_COPY(VmaMutexLock)
3316 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3317 m_pMutex(useMutex ? &mutex : VMA_NULL)
3334 VMA_MUTEX* m_pMutex;
3337 #if VMA_DEBUG_GLOBAL_MUTEX 3338 static VMA_MUTEX gDebugGlobalMutex;
3339 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3341 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3345 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns the iterator of the
// first element for which cmp(elem, key) is false (i.e. lower_bound), or end
// when all elements are less than key. The while loop and both branches were
// missing in the mangled source; restored here.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3378 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3380 if((pAllocationCallbacks != VMA_NULL) &&
3381 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3383 return (*pAllocationCallbacks->pfnAllocation)(
3384 pAllocationCallbacks->pUserData,
3387 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3391 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3395 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3397 if((pAllocationCallbacks != VMA_NULL) &&
3398 (pAllocationCallbacks->pfnFree != VMA_NULL))
3400 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3404 VMA_SYSTEM_FREE(ptr);
3408 template<
typename T>
3409 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3411 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3414 template<
typename T>
3415 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3417 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3420 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3422 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3424 template<
typename T>
3425 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3428 VmaFree(pAllocationCallbacks, ptr);
3431 template<
typename T>
3432 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3436 for(
size_t i = count; i--; )
3440 VmaFree(pAllocationCallbacks, ptr);
3445 template<
typename T>
3446 class VmaStlAllocator
3449 const VkAllocationCallbacks*
const m_pCallbacks;
3450 typedef T value_type;
3452 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3453 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3455 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3456 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3458 template<
typename U>
3459 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3461 return m_pCallbacks == rhs.m_pCallbacks;
3463 template<
typename U>
3464 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3466 return m_pCallbacks != rhs.m_pCallbacks;
3469 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3472 #if VMA_USE_STL_VECTOR 3474 #define VmaVector std::vector 3476 template<
typename T,
typename allocatorT>
3477 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3479 vec.insert(vec.begin() + index, item);
// Removes the element at position `index` from vec (std::vector flavor).
template<typename ElemT, typename AllocT>
static void VmaVectorRemove(std::vector<ElemT, AllocT>& vec, size_t index)
{
    const typename std::vector<ElemT, AllocT>::iterator pos = vec.begin() + index;
    vec.erase(pos);
}
3488 #else // #if VMA_USE_STL_VECTOR 3493 template<
typename T,
typename AllocatorT>
3497 typedef T value_type;
3499 VmaVector(
const AllocatorT& allocator) :
3500 m_Allocator(allocator),
3507 VmaVector(
size_t count,
const AllocatorT& allocator) :
3508 m_Allocator(allocator),
3509 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3515 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3516 m_Allocator(src.m_Allocator),
3517 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3518 m_Count(src.m_Count),
3519 m_Capacity(src.m_Count)
3523 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3529 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3532 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3536 resize(rhs.m_Count);
3539 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3545 bool empty()
const {
return m_Count == 0; }
3546 size_t size()
const {
return m_Count; }
3547 T* data() {
return m_pArray; }
3548 const T* data()
const {
return m_pArray; }
3550 T& operator[](
size_t index)
3552 VMA_HEAVY_ASSERT(index < m_Count);
3553 return m_pArray[index];
3555 const T& operator[](
size_t index)
const 3557 VMA_HEAVY_ASSERT(index < m_Count);
3558 return m_pArray[index];
3563 VMA_HEAVY_ASSERT(m_Count > 0);
3566 const T& front()
const 3568 VMA_HEAVY_ASSERT(m_Count > 0);
3573 VMA_HEAVY_ASSERT(m_Count > 0);
3574 return m_pArray[m_Count - 1];
3576 const T& back()
const 3578 VMA_HEAVY_ASSERT(m_Count > 0);
3579 return m_pArray[m_Count - 1];
3582 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3584 newCapacity = VMA_MAX(newCapacity, m_Count);
3586 if((newCapacity < m_Capacity) && !freeMemory)
3588 newCapacity = m_Capacity;
3591 if(newCapacity != m_Capacity)
3593 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3596 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3598 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3599 m_Capacity = newCapacity;
3600 m_pArray = newArray;
3604 void resize(
size_t newCount,
bool freeMemory =
false)
3606 size_t newCapacity = m_Capacity;
3607 if(newCount > m_Capacity)
3609 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3613 newCapacity = newCount;
3616 if(newCapacity != m_Capacity)
3618 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3619 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3620 if(elementsToCopy != 0)
3622 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3624 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3625 m_Capacity = newCapacity;
3626 m_pArray = newArray;
3632 void clear(
bool freeMemory =
false)
3634 resize(0, freeMemory);
3637 void insert(
size_t index,
const T& src)
3639 VMA_HEAVY_ASSERT(index <= m_Count);
3640 const size_t oldCount = size();
3641 resize(oldCount + 1);
3642 if(index < oldCount)
3644 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3646 m_pArray[index] = src;
3649 void remove(
size_t index)
3651 VMA_HEAVY_ASSERT(index < m_Count);
3652 const size_t oldCount = size();
3653 if(index < oldCount - 1)
3655 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3657 resize(oldCount - 1);
3660 void push_back(
const T& src)
3662 const size_t newIndex = size();
3663 resize(newIndex + 1);
3664 m_pArray[newIndex] = src;
3669 VMA_HEAVY_ASSERT(m_Count > 0);
3673 void push_front(
const T& src)
3680 VMA_HEAVY_ASSERT(m_Count > 0);
3684 typedef T* iterator;
3686 iterator begin() {
return m_pArray; }
3687 iterator end() {
return m_pArray + m_Count; }
3690 AllocatorT m_Allocator;
3696 template<
typename T,
typename allocatorT>
3697 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3699 vec.insert(index, item);
3702 template<
typename T,
typename allocatorT>
3703 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3708 #endif // #if VMA_USE_STL_VECTOR 3710 template<
typename CmpLess,
typename VectorT>
3711 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3713 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3715 vector.data() + vector.size(),
3717 CmpLess()) - vector.data();
3718 VmaVectorInsert(vector, indexToInsert, value);
3719 return indexToInsert;
// Removes the first element equal to value (under CmpLess) from a sorted
// vector. Returns true when an element was found and removed. Comparator
// declaration, call arguments, and return statements were missing in the
// mangled source; restored here.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equality expressed through the ordering: neither a<b nor b<a.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
3740 template<
typename CmpLess,
typename IterT,
typename KeyT>
3741 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3744 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3745 beg, end, value, comparator);
3747 (!comparator(*it, value) && !comparator(value, *it)))
3762 template<
typename T>
3763 class VmaPoolAllocator
3765 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3767 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3768 ~VmaPoolAllocator();
3776 uint32_t NextFreeIndex;
3783 uint32_t FirstFreeIndex;
3786 const VkAllocationCallbacks* m_pAllocationCallbacks;
3787 size_t m_ItemsPerBlock;
3788 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3790 ItemBlock& CreateNewBlock();
3793 template<
typename T>
3794 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3795 m_pAllocationCallbacks(pAllocationCallbacks),
3796 m_ItemsPerBlock(itemsPerBlock),
3797 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3799 VMA_ASSERT(itemsPerBlock > 0);
3802 template<
typename T>
3803 VmaPoolAllocator<T>::~VmaPoolAllocator()
3808 template<
typename T>
3809 void VmaPoolAllocator<T>::Clear()
3811 for(
size_t i = m_ItemBlocks.size(); i--; )
3812 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3813 m_ItemBlocks.clear();
3816 template<
typename T>
3817 T* VmaPoolAllocator<T>::Alloc()
3819 for(
size_t i = m_ItemBlocks.size(); i--; )
3821 ItemBlock& block = m_ItemBlocks[i];
3823 if(block.FirstFreeIndex != UINT32_MAX)
3825 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3826 block.FirstFreeIndex = pItem->NextFreeIndex;
3827 return &pItem->Value;
3832 ItemBlock& newBlock = CreateNewBlock();
3833 Item*
const pItem = &newBlock.pItems[0];
3834 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3835 return &pItem->Value;
3838 template<
typename T>
3839 void VmaPoolAllocator<T>::Free(T* ptr)
3842 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3844 ItemBlock& block = m_ItemBlocks[i];
3848 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3851 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3853 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3854 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3855 block.FirstFreeIndex = index;
3859 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3862 template<
typename T>
3863 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3865 ItemBlock newBlock = {
3866 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3868 m_ItemBlocks.push_back(newBlock);
3871 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3872 newBlock.pItems[i].NextFreeIndex = i + 1;
3873 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3874 return m_ItemBlocks.back();
3880 #if VMA_USE_STL_LIST 3882 #define VmaList std::list 3884 #else // #if VMA_USE_STL_LIST 3886 template<
typename T>
3895 template<
typename T>
3898 VMA_CLASS_NO_COPY(VmaRawList)
3900 typedef VmaListItem<T> ItemType;
3902 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3906 size_t GetCount()
const {
return m_Count; }
3907 bool IsEmpty()
const {
return m_Count == 0; }
3909 ItemType* Front() {
return m_pFront; }
3910 const ItemType* Front()
const {
return m_pFront; }
3911 ItemType* Back() {
return m_pBack; }
3912 const ItemType* Back()
const {
return m_pBack; }
3914 ItemType* PushBack();
3915 ItemType* PushFront();
3916 ItemType* PushBack(
const T& value);
3917 ItemType* PushFront(
const T& value);
3922 ItemType* InsertBefore(ItemType* pItem);
3924 ItemType* InsertAfter(ItemType* pItem);
3926 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3927 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3929 void Remove(ItemType* pItem);
3932 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3933 VmaPoolAllocator<ItemType> m_ItemAllocator;
3939 template<
typename T>
3940 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3941 m_pAllocationCallbacks(pAllocationCallbacks),
3942 m_ItemAllocator(pAllocationCallbacks, 128),
3949 template<
typename T>
3950 VmaRawList<T>::~VmaRawList()
3956 template<
typename T>
3957 void VmaRawList<T>::Clear()
3959 if(IsEmpty() ==
false)
3961 ItemType* pItem = m_pBack;
3962 while(pItem != VMA_NULL)
3964 ItemType*
const pPrevItem = pItem->pPrev;
3965 m_ItemAllocator.Free(pItem);
3968 m_pFront = VMA_NULL;
3974 template<
typename T>
3975 VmaListItem<T>* VmaRawList<T>::PushBack()
3977 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3978 pNewItem->pNext = VMA_NULL;
3981 pNewItem->pPrev = VMA_NULL;
3982 m_pFront = pNewItem;
3988 pNewItem->pPrev = m_pBack;
3989 m_pBack->pNext = pNewItem;
3996 template<
typename T>
3997 VmaListItem<T>* VmaRawList<T>::PushFront()
3999 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4000 pNewItem->pPrev = VMA_NULL;
4003 pNewItem->pNext = VMA_NULL;
4004 m_pFront = pNewItem;
4010 pNewItem->pNext = m_pFront;
4011 m_pFront->pPrev = pNewItem;
4012 m_pFront = pNewItem;
4018 template<
typename T>
4019 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4021 ItemType*
const pNewItem = PushBack();
4022 pNewItem->Value = value;
4026 template<
typename T>
4027 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4029 ItemType*
const pNewItem = PushFront();
4030 pNewItem->Value = value;
4034 template<
typename T>
4035 void VmaRawList<T>::PopBack()
4037 VMA_HEAVY_ASSERT(m_Count > 0);
4038 ItemType*
const pBackItem = m_pBack;
4039 ItemType*
const pPrevItem = pBackItem->pPrev;
4040 if(pPrevItem != VMA_NULL)
4042 pPrevItem->pNext = VMA_NULL;
4044 m_pBack = pPrevItem;
4045 m_ItemAllocator.Free(pBackItem);
4049 template<
typename T>
4050 void VmaRawList<T>::PopFront()
4052 VMA_HEAVY_ASSERT(m_Count > 0);
4053 ItemType*
const pFrontItem = m_pFront;
4054 ItemType*
const pNextItem = pFrontItem->pNext;
4055 if(pNextItem != VMA_NULL)
4057 pNextItem->pPrev = VMA_NULL;
4059 m_pFront = pNextItem;
4060 m_ItemAllocator.Free(pFrontItem);
4064 template<
typename T>
4065 void VmaRawList<T>::Remove(ItemType* pItem)
4067 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4068 VMA_HEAVY_ASSERT(m_Count > 0);
4070 if(pItem->pPrev != VMA_NULL)
4072 pItem->pPrev->pNext = pItem->pNext;
4076 VMA_HEAVY_ASSERT(m_pFront == pItem);
4077 m_pFront = pItem->pNext;
4080 if(pItem->pNext != VMA_NULL)
4082 pItem->pNext->pPrev = pItem->pPrev;
4086 VMA_HEAVY_ASSERT(m_pBack == pItem);
4087 m_pBack = pItem->pPrev;
4090 m_ItemAllocator.Free(pItem);
4094 template<
typename T>
4095 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4097 if(pItem != VMA_NULL)
4099 ItemType*
const prevItem = pItem->pPrev;
4100 ItemType*
const newItem = m_ItemAllocator.Alloc();
4101 newItem->pPrev = prevItem;
4102 newItem->pNext = pItem;
4103 pItem->pPrev = newItem;
4104 if(prevItem != VMA_NULL)
4106 prevItem->pNext = newItem;
4110 VMA_HEAVY_ASSERT(m_pFront == pItem);
4120 template<
typename T>
4121 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4123 if(pItem != VMA_NULL)
4125 ItemType*
const nextItem = pItem->pNext;
4126 ItemType*
const newItem = m_ItemAllocator.Alloc();
4127 newItem->pNext = nextItem;
4128 newItem->pPrev = pItem;
4129 pItem->pNext = newItem;
4130 if(nextItem != VMA_NULL)
4132 nextItem->pPrev = newItem;
4136 VMA_HEAVY_ASSERT(m_pBack == pItem);
4146 template<
typename T>
4147 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4149 ItemType*
const newItem = InsertBefore(pItem);
4150 newItem->Value = value;
4154 template<
typename T>
4155 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4157 ItemType*
const newItem = InsertAfter(pItem);
4158 newItem->Value = value;
4162 template<
typename T,
typename AllocatorT>
4165 VMA_CLASS_NO_COPY(VmaList)
4176 T& operator*()
const 4178 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4179 return m_pItem->Value;
4181 T* operator->()
const 4183 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4184 return &m_pItem->Value;
4187 iterator& operator++()
4189 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4190 m_pItem = m_pItem->pNext;
4193 iterator& operator--()
4195 if(m_pItem != VMA_NULL)
4197 m_pItem = m_pItem->pPrev;
4201 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4202 m_pItem = m_pList->Back();
4207 iterator operator++(
int)
4209 iterator result = *
this;
4213 iterator operator--(
int)
4215 iterator result = *
this;
4220 bool operator==(
const iterator& rhs)
const 4222 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4223 return m_pItem == rhs.m_pItem;
4225 bool operator!=(
const iterator& rhs)
const 4227 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4228 return m_pItem != rhs.m_pItem;
4232 VmaRawList<T>* m_pList;
4233 VmaListItem<T>* m_pItem;
4235 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4241 friend class VmaList<T, AllocatorT>;
4244 class const_iterator
4253 const_iterator(
const iterator& src) :
4254 m_pList(src.m_pList),
4255 m_pItem(src.m_pItem)
4259 const T& operator*()
const 4261 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4262 return m_pItem->Value;
4264 const T* operator->()
const 4266 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4267 return &m_pItem->Value;
4270 const_iterator& operator++()
4272 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4273 m_pItem = m_pItem->pNext;
4276 const_iterator& operator--()
4278 if(m_pItem != VMA_NULL)
4280 m_pItem = m_pItem->pPrev;
4284 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4285 m_pItem = m_pList->Back();
4290 const_iterator operator++(
int)
4292 const_iterator result = *
this;
4296 const_iterator operator--(
int)
4298 const_iterator result = *
this;
4303 bool operator==(
const const_iterator& rhs)
const 4305 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4306 return m_pItem == rhs.m_pItem;
4308 bool operator!=(
const const_iterator& rhs)
const 4310 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4311 return m_pItem != rhs.m_pItem;
4315 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4321 const VmaRawList<T>* m_pList;
4322 const VmaListItem<T>* m_pItem;
4324 friend class VmaList<T, AllocatorT>;
4327 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4329 bool empty()
const {
return m_RawList.IsEmpty(); }
4330 size_t size()
const {
return m_RawList.GetCount(); }
4332 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4333 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4335 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4336 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4338 void clear() { m_RawList.Clear(); }
4339 void push_back(
const T& value) { m_RawList.PushBack(value); }
4340 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4341 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4344 VmaRawList<T> m_RawList;
4347 #endif // #if VMA_USE_STL_LIST 4355 #if VMA_USE_STL_UNORDERED_MAP 4357 #define VmaPair std::pair 4359 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4360 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4362 #else // #if VMA_USE_STL_UNORDERED_MAP 4364 template<
typename T1,
typename T2>
4370 VmaPair() : first(), second() { }
4371 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4377 template<
typename KeyT,
typename ValueT>
4381 typedef VmaPair<KeyT, ValueT> PairType;
4382 typedef PairType* iterator;
4384 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4386 iterator begin() {
return m_Vector.begin(); }
4387 iterator end() {
return m_Vector.end(); }
4389 void insert(
const PairType& pair);
4390 iterator find(
const KeyT& key);
4391 void erase(iterator it);
4394 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4397 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4399 template<
typename FirstT,
typename SecondT>
4400 struct VmaPairFirstLess
4402 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4404 return lhs.first < rhs.first;
4406 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4408 return lhs.first < rhsFirst;
4412 template<
typename KeyT,
typename ValueT>
4413 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4415 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4417 m_Vector.data() + m_Vector.size(),
4419 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4420 VmaVectorInsert(m_Vector, indexToInsert, pair);
4423 template<
typename KeyT,
typename ValueT>
4424 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4426 PairType* it = VmaBinaryFindFirstNotLess(
4428 m_Vector.data() + m_Vector.size(),
4430 VmaPairFirstLess<KeyT, ValueT>());
4431 if((it != m_Vector.end()) && (it->first == key))
4437 return m_Vector.end();
4441 template<
typename KeyT,
typename ValueT>
4442 void VmaMap<KeyT, ValueT>::erase(iterator it)
4444 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4447 #endif // #if VMA_USE_STL_UNORDERED_MAP 4453 class VmaDeviceMemoryBlock;
// Direction of a host-visible memory cache operation applied to a mapped range.
enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// Internal representation of a single VmaAllocation handle. An allocation is
// either a suballocation inside a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK,
// state in m_BlockAllocation) or its own dedicated VkDeviceMemory
// (ALLOCATION_TYPE_DEDICATED, state in m_DedicatedAllocation).
// m_LastUseFrameIndex is atomic: it backs the lost-allocation mechanism via
// compare_exchange below. Several original lines (members, braces, access
// specifiers) were dropped by extraction — the fused numeric prefixes mark
// surviving original line numbers.
4457 struct VmaAllocation_T
4459 VMA_CLASS_NO_COPY(VmaAllocation_T)
// High bit of m_MapCount marks a persistently mapped allocation; the low
// 7 bits count explicit Map() calls.
4461 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4465 FLAG_USER_DATA_STRING = 0x01,
4469 enum ALLOCATION_TYPE
4471 ALLOCATION_TYPE_NONE,
4472 ALLOCATION_TYPE_BLOCK,
4473 ALLOCATION_TYPE_DEDICATED,
// Constructor leaves the object in ALLOCATION_TYPE_NONE; one of the Init*
// methods below must be called exactly once afterwards.
4476 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4479 m_pUserData(VMA_NULL),
4480 m_LastUseFrameIndex(currentFrameIndex),
4481 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4482 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4484 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4486 #if VMA_STATS_STRING_ENABLED 4487 m_CreationFrameIndex = currentFrameIndex;
4488 m_BufferImageUsage = 0;
// Destructor-time sanity checks: all user Map() calls must be balanced and
// user data must already have been cleared via SetUserData.
4494 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4497 VMA_ASSERT(m_pUserData == VMA_NULL);
// Turns a NONE allocation into a block suballocation.
4500 void InitBlockAllocation(
4502 VmaDeviceMemoryBlock* block,
4503 VkDeviceSize offset,
4504 VkDeviceSize alignment,
4506 VmaSuballocationType suballocationType,
4510 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4511 VMA_ASSERT(block != VMA_NULL);
4512 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4513 m_Alignment = alignment;
4515 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4516 m_SuballocationType = (uint8_t)suballocationType;
4517 m_BlockAllocation.m_hPool = hPool;
4518 m_BlockAllocation.m_Block = block;
4519 m_BlockAllocation.m_Offset = offset;
4520 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Init path for a "lost" placeholder allocation: no real block backing,
// frame index must already be VMA_FRAME_INDEX_LOST.
4525 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4526 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4527 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4528 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4529 m_BlockAllocation.m_Block = VMA_NULL;
4530 m_BlockAllocation.m_Offset = 0;
4531 m_BlockAllocation.m_CanBecomeLost =
true;
// Re-points an existing block suballocation (used by defragmentation).
4534 void ChangeBlockAllocation(
4536 VmaDeviceMemoryBlock* block,
4537 VkDeviceSize offset);
4539 void ChangeSize(VkDeviceSize newSize);
// Turns a NONE allocation into a dedicated VkDeviceMemory allocation.
4542 void InitDedicatedAllocation(
4543 uint32_t memoryTypeIndex,
4544 VkDeviceMemory hMemory,
4545 VmaSuballocationType suballocationType,
4549 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4550 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4551 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4554 m_SuballocationType = (uint8_t)suballocationType;
4555 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4556 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4557 m_DedicatedAllocation.m_hMemory = hMemory;
4558 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors.
4561 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4562 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4563 VkDeviceSize GetSize()
const {
return m_Size; }
4564 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4565 void* GetUserData()
const {
return m_pUserData; }
4566 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4567 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for ALLOCATION_TYPE_BLOCK (asserted).
4569 VmaDeviceMemoryBlock* GetBlock()
const 4571 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4572 return m_BlockAllocation.m_Block;
4574 VkDeviceSize GetOffset()
const;
4575 VkDeviceMemory GetMemory()
const;
4576 uint32_t GetMemoryTypeIndex()
const;
4577 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4578 void* GetMappedData()
const;
4579 bool CanBecomeLost()
const;
// Lost-allocation support: last-use frame is read/CAS'd atomically so
// multiple threads can touch allocations concurrently.
4582 uint32_t GetLastUseFrameIndex()
const 4584 return m_LastUseFrameIndex.load();
4586 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4588 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4598 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4600 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4602 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers, one pair per allocation kind.
4613 void BlockAllocMap();
4614 void BlockAllocUnmap();
4615 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4618 #if VMA_STATS_STRING_ENABLED 4619 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4620 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4622 void InitBufferImageUsage(uint32_t bufferImageUsage)
4624 VMA_ASSERT(m_BufferImageUsage == 0);
4625 m_BufferImageUsage = bufferImageUsage;
4628 void PrintParameters(
class VmaJsonWriter& json)
const;
// Data members. Block/dedicated state presumably lives in an anonymous
// union in the full file (lines elided by extraction) — confirm upstream.
4632 VkDeviceSize m_Alignment;
4633 VkDeviceSize m_Size;
4635 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4637 uint8_t m_SuballocationType;
4644 struct BlockAllocation
4647 VmaDeviceMemoryBlock* m_Block;
4648 VkDeviceSize m_Offset;
4649 bool m_CanBecomeLost;
4653 struct DedicatedAllocation
4655 uint32_t m_MemoryTypeIndex;
4656 VkDeviceMemory m_hMemory;
4657 void* m_pMappedData;
4663 BlockAllocation m_BlockAllocation;
4665 DedicatedAllocation m_DedicatedAllocation;
4668 #if VMA_STATS_STRING_ENABLED 4669 uint32_t m_CreationFrameIndex;
4670 uint32_t m_BufferImageUsage;
// One entry of a block's suballocation list: a used or free byte range.
// NOTE(review): extraction dropped members between the two below (original
// lines 4683-4684); only offset and type are visible in this chunk.
4680 struct VmaSuballocation
4682 VkDeviceSize offset;
4685 VmaSuballocationType type;
4689 struct VmaSuballocationOffsetLess
4691 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4693 return lhs.offset < rhs.offset;
4696 struct VmaSuballocationOffsetGreater
4698 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4700 return lhs.offset > rhs.offset;
4704 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Weight added per existing allocation that would have to be made "lost" to
// satisfy a new request — see VmaAllocationRequest::CalcCost() below.
4707 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4722 struct VmaAllocationRequest
4724 VkDeviceSize offset;
4725 VkDeviceSize sumFreeSize;
4726 VkDeviceSize sumItemSize;
4727 VmaSuballocationList::iterator item;
4728 size_t itemsToMakeLostCount;
4731 VkDeviceSize CalcCost()
const 4733 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract base for per-block bookkeeping. Concrete strategies below
// (_Generic, _Linear, _Buddy) implement placement, free-tracking, statistics
// and JSON dumping for one VkDeviceMemory block of m_Size bytes.
// Internal lines (constructor, Alloc() signature, braces) were dropped by
// extraction; the fused numbers mark surviving original lines.
4741 class VmaBlockMetadata
4745 virtual ~VmaBlockMetadata() { }
4746 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Consistency check of internal structures; pure diagnostic.
4749 virtual bool Validate()
const = 0;
4750 VkDeviceSize GetSize()
const {
return m_Size; }
4751 virtual size_t GetAllocationCount()
const = 0;
4752 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4753 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4755 virtual bool IsEmpty()
const = 0;
4757 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4759 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4761 #if VMA_STATS_STRING_ENABLED 4762 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Core placement query: tries to find room for allocSize/allocAlignment,
// optionally planning to make other allocations lost. Fills
// *pAllocationRequest on success.
4768 virtual bool CreateAllocationRequest(
4769 uint32_t currentFrameIndex,
4770 uint32_t frameInUseCount,
4771 VkDeviceSize bufferImageGranularity,
4772 VkDeviceSize allocSize,
4773 VkDeviceSize allocAlignment,
4775 VmaSuballocationType allocType,
4776 bool canMakeOtherLost,
4778 VmaAllocationRequest* pAllocationRequest) = 0;
4780 virtual bool MakeRequestedAllocationsLost(
4781 uint32_t currentFrameIndex,
4782 uint32_t frameInUseCount,
4783 VmaAllocationRequest* pAllocationRequest) = 0;
4785 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4787 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Parameters of the (extraction-elided) Alloc() commit method.
4791 const VmaAllocationRequest& request,
4792 VmaSuballocationType type,
4793 VkDeviceSize allocSize,
4799 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// Default: in-place resize unsupported; _Generic overrides this.
4802 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
4805 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Shared helpers for the derived classes' JSON dumps.
4807 #if VMA_STATS_STRING_ENABLED 4808 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4809 VkDeviceSize unusedBytes,
4810 size_t allocationCount,
4811 size_t unusedRangeCount)
const;
4812 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4813 VkDeviceSize offset,
4815 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4816 VkDeviceSize offset,
4817 VkDeviceSize size)
const;
4818 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4822 VkDeviceSize m_Size;
4823 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-bail helper used by the Validate() implementations.
// VmaBlockMetadata_Generic: the default strategy — a doubly-linked list of
// used/free suballocations (m_Suballocations) plus a by-size index of free
// ranges (m_FreeSuballocationsBySize) for best-fit search.
4826 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4827 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4831 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4833 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4836 virtual ~VmaBlockMetadata_Generic();
4837 virtual void Init(VkDeviceSize size);
4839 virtual bool Validate()
const;
// List holds both used and free entries, so used = total - free.
4840 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4841 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4842 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4843 virtual bool IsEmpty()
const;
4845 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4846 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4848 #if VMA_STATS_STRING_ENABLED 4849 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4852 virtual bool CreateAllocationRequest(
4853 uint32_t currentFrameIndex,
4854 uint32_t frameInUseCount,
4855 VkDeviceSize bufferImageGranularity,
4856 VkDeviceSize allocSize,
4857 VkDeviceSize allocAlignment,
4859 VmaSuballocationType allocType,
4860 bool canMakeOtherLost,
4862 VmaAllocationRequest* pAllocationRequest);
4864 virtual bool MakeRequestedAllocationsLost(
4865 uint32_t currentFrameIndex,
4866 uint32_t frameInUseCount,
4867 VmaAllocationRequest* pAllocationRequest);
4869 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4871 virtual VkResult CheckCorruption(
const void* pBlockData);
// Parameters of the (extraction-elided) Alloc() override.
4874 const VmaAllocationRequest& request,
4875 VmaSuballocationType type,
4876 VkDeviceSize allocSize,
4881 virtual void FreeAtOffset(VkDeviceSize offset);
// This strategy supports in-place grow/shrink (overrides base's false).
4883 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
4886 uint32_t m_FreeCount;
4887 VkDeviceSize m_SumFreeSize;
4888 VmaSuballocationList m_Suballocations;
// Free ranges sorted by size — enables binary-searched best-fit.
4891 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4893 bool ValidateFreeSuballocationList()
const;
// Tests whether a request can be placed at `suballocItem`, accounting for
// bufferImageGranularity and (optionally) making other allocations lost.
4897 bool CheckAllocation(
4898 uint32_t currentFrameIndex,
4899 uint32_t frameInUseCount,
4900 VkDeviceSize bufferImageGranularity,
4901 VkDeviceSize allocSize,
4902 VkDeviceSize allocAlignment,
4903 VmaSuballocationType allocType,
4904 VmaSuballocationList::const_iterator suballocItem,
4905 bool canMakeOtherLost,
4906 VkDeviceSize* pOffset,
4907 size_t* itemsToMakeLostCount,
4908 VkDeviceSize* pSumFreeSize,
4909 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
4911 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4915 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4918 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4921 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaBlockMetadata_Linear: linear/ring-buffer strategy. Two suballocation
// vectors are swapped via m_1stVectorIndex; the second vector serves either
// as the wrap-around part of a ring buffer or as an upper stack
// (see SECOND_VECTOR_MODE). Freed entries become "null items" that are
// compacted lazily (CleanupAfterFree / ShouldCompact1st).
5002 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5004 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5007 virtual ~VmaBlockMetadata_Linear();
5008 virtual void Init(VkDeviceSize size);
5010 virtual bool Validate()
const;
5011 virtual size_t GetAllocationCount()
const;
5012 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5013 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5014 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5016 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5017 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5019 #if VMA_STATS_STRING_ENABLED 5020 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5023 virtual bool CreateAllocationRequest(
5024 uint32_t currentFrameIndex,
5025 uint32_t frameInUseCount,
5026 VkDeviceSize bufferImageGranularity,
5027 VkDeviceSize allocSize,
5028 VkDeviceSize allocAlignment,
5030 VmaSuballocationType allocType,
5031 bool canMakeOtherLost,
5033 VmaAllocationRequest* pAllocationRequest);
5035 virtual bool MakeRequestedAllocationsLost(
5036 uint32_t currentFrameIndex,
5037 uint32_t frameInUseCount,
5038 VmaAllocationRequest* pAllocationRequest);
5040 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5042 virtual VkResult CheckCorruption(
const void* pBlockData);
// Parameters of the (extraction-elided) Alloc() override.
5045 const VmaAllocationRequest& request,
5046 VmaSuballocationType type,
5047 VkDeviceSize allocSize,
5052 virtual void FreeAtOffset(VkDeviceSize offset);
5062 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the 2nd vector is being used relative to the 1st.
5064 enum SECOND_VECTOR_MODE
5066 SECOND_VECTOR_EMPTY,
5071 SECOND_VECTOR_RING_BUFFER,
5077 SECOND_VECTOR_DOUBLE_STACK,
5080 VkDeviceSize m_SumFreeSize;
5081 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5082 uint32_t m_1stVectorIndex;
5083 SECOND_VECTOR_MODE m_2ndVectorMode;
// Accessors resolve which physical vector currently plays the 1st/2nd role.
5085 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5086 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5087 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5088 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of freed-but-not-yet-compacted ("null") items.
5091 size_t m_1stNullItemsBeginCount;
5093 size_t m_1stNullItemsMiddleCount;
5095 size_t m_2ndNullItemsCount;
5097 bool ShouldCompact1st()
const;
5098 void CleanupAfterFree();
// VmaBlockMetadata_Buddy: power-of-two buddy allocator. A binary tree of
// Nodes splits the usable (power-of-two) part of the block into levels;
// free nodes are linked into per-level free lists (m_FreeList). The tail
// beyond the largest power of two is unusable (GetUnusableSize).
5112 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5114 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5117 virtual ~VmaBlockMetadata_Buddy();
5118 virtual void Init(VkDeviceSize size);
5120 virtual bool Validate()
const;
5121 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Reported free size includes the permanently unusable tail.
5122 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5123 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5124 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5126 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5127 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5129 #if VMA_STATS_STRING_ENABLED 5130 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5133 virtual bool CreateAllocationRequest(
5134 uint32_t currentFrameIndex,
5135 uint32_t frameInUseCount,
5136 VkDeviceSize bufferImageGranularity,
5137 VkDeviceSize allocSize,
5138 VkDeviceSize allocAlignment,
5140 VmaSuballocationType allocType,
5141 bool canMakeOtherLost,
5143 VmaAllocationRequest* pAllocationRequest);
5145 virtual bool MakeRequestedAllocationsLost(
5146 uint32_t currentFrameIndex,
5147 uint32_t frameInUseCount,
5148 VmaAllocationRequest* pAllocationRequest);
5150 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not implemented for the buddy strategy.
5152 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// Parameters of the (extraction-elided) Alloc() override.
5155 const VmaAllocationRequest& request,
5156 VmaSuballocationType type,
5157 VkDeviceSize allocSize,
5161 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5162 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5165 static const VkDeviceSize MIN_NODE_SIZE = 32;
5166 static const size_t MAX_LEVELS = 30;
// Accumulators filled by ValidateNode() and checked against the members.
5168 struct ValidationContext
5170 size_t calculatedAllocationCount;
5171 size_t calculatedFreeCount;
5172 VkDeviceSize calculatedSumFreeSize;
5174 ValidationContext() :
5175 calculatedAllocationCount(0),
5176 calculatedFreeCount(0),
5177 calculatedSumFreeSize(0) { }
5182 VkDeviceSize offset;
// Usable size is the largest power of two <= block size; deeper level
// means smaller node (LevelToNodeSize halves per level).
5212 VkDeviceSize m_UsableSize;
5213 uint32_t m_LevelCount;
5219 } m_FreeList[MAX_LEVELS];
5221 size_t m_AllocationCount;
5225 VkDeviceSize m_SumFreeSize;
5227 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5228 void DeleteNode(Node* node);
5229 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5230 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5231 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5233 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5234 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5238 void AddToFreeListFront(uint32_t level, Node* node);
5242 void RemoveFromFreeList(uint32_t level, Node* node);
5244 #if VMA_STATS_STRING_ENABLED 5245 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Wraps one VkDeviceMemory object plus the metadata strategy that manages
// suballocations inside it. Mapping is reference-counted (m_MapCount);
// Destroy() (elided by extraction) must release m_hMemory before the dtor runs.
5255 class VmaDeviceMemoryBlock
5257 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5259 VmaBlockMetadata* m_pMetadata;
// Dtor only asserts clean shutdown — the memory itself must already be freed.
5263 ~VmaDeviceMemoryBlock()
5265 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5266 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init() parameters (signature line elided): takes ownership of newMemory
// and picks a metadata strategy based on `algorithm`.
5272 uint32_t newMemoryTypeIndex,
5273 VkDeviceMemory newMemory,
5274 VkDeviceSize newSize,
5276 uint32_t algorithm);
5280 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5281 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5282 uint32_t GetId()
const {
return m_Id; }
5283 void* GetMappedData()
const {
return m_pMappedData; }
5286 bool Validate()
const;
// Reference-counted map: `count` is added to m_MapCount.
5291 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection margins written/checked around an allocation.
5294 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5295 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5297 VkResult BindBufferMemory(
5301 VkResult BindImageMemory(
5307 uint32_t m_MemoryTypeIndex;
5309 VkDeviceMemory m_hMemory;
// Map ref-counting state; synchronization members were elided by extraction.
5314 uint32_t m_MapCount;
5315 void* m_pMappedData;
// Orders raw pointers by address; used as the comparator for sorted
// containers of pointers.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlocks of one memory type — the
// backing store for either a custom pool or one of the allocator's default
// per-memory-type pools (m_IsCustomPool). Also owns the optional
// defragmentator for its blocks.
5334 struct VmaBlockVector
5336 VMA_CLASS_NO_COPY(VmaBlockVector)
// Constructor parameters (signature line elided by extraction).
5340 uint32_t memoryTypeIndex,
5341 VkDeviceSize preferredBlockSize,
5342 size_t minBlockCount,
5343 size_t maxBlockCount,
5344 VkDeviceSize bufferImageGranularity,
5345 uint32_t frameInUseCount,
5347 bool explicitBlockSize,
5348 uint32_t algorithm);
5351 VkResult CreateMinBlocks();
5353 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5354 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5355 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5356 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5357 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5361 bool IsEmpty()
const {
return m_Blocks.empty(); }
5362 bool IsCorruptionDetectionEnabled()
const;
// Allocate() parameters (signature elided): finds/creates a block with room.
5366 uint32_t currentFrameIndex,
5368 VkDeviceSize alignment,
5370 VmaSuballocationType suballocType,
5379 #if VMA_STATS_STRING_ENABLED 5380 void PrintDetailedMap(
class VmaJsonWriter& json);
5383 void MakePoolAllocationsLost(
5384 uint32_t currentFrameIndex,
5385 size_t* pLostAllocationCount);
5386 VkResult CheckCorruption();
5388 VmaDefragmentator* EnsureDefragmentator(
5390 uint32_t currentFrameIndex);
5392 VkResult Defragment(
5394 VkDeviceSize& maxBytesToMove,
5395 uint32_t& maxAllocationsToMove);
5397 void DestroyDefragmentator();
5400 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
5403 const uint32_t m_MemoryTypeIndex;
5404 const VkDeviceSize m_PreferredBlockSize;
5405 const size_t m_MinBlockCount;
5406 const size_t m_MaxBlockCount;
5407 const VkDeviceSize m_BufferImageGranularity;
5408 const uint32_t m_FrameInUseCount;
5409 const bool m_IsCustomPool;
5410 const bool m_ExplicitBlockSize;
5411 const uint32_t m_Algorithm;
5412 bool m_HasEmptyBlock;
5415 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5419 VmaDefragmentator* m_pDefragmentator;
5420 uint32_t m_NextBlockId;
5422 VkDeviceSize CalcMaxBlockSize()
const;
5425 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted without a full re-sort each call.
5429 void IncrementallySortBlocks();
5432 VkResult AllocateFromBlock(
5433 VmaDeviceMemoryBlock* pBlock,
5435 uint32_t currentFrameIndex,
5437 VkDeviceSize alignment,
5440 VmaSuballocationType suballocType,
5444 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Interior of VmaPool_T (class header elided by extraction): a custom pool is
// essentially a thin wrapper around one VmaBlockVector plus a diagnostic Id.
5449 VMA_CLASS_NO_COPY(VmaPool_T)
5451 VmaBlockVector m_BlockVector;
5456 VkDeviceSize preferredBlockSize);
5459 uint32_t GetId()
const {
return m_Id; }
// Id may be assigned exactly once (asserted), after construction.
5460 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// Moves allocations between the blocks of one VmaBlockVector to reduce
// fragmentation, within caller-supplied byte/count budgets. Collects
// per-block candidate lists (BlockInfo), sorts source allocations largest
// first, and prefers destination blocks without immovable allocations.
5462 #if VMA_STATS_STRING_ENABLED 5470 class VmaDefragmentator
5472 VMA_CLASS_NO_COPY(VmaDefragmentator)
5475 VmaBlockVector*
const m_pBlockVector;
5476 uint32_t m_CurrentFrameIndex;
// Running totals reported via GetBytesMoved()/GetAllocationsMoved().
5477 VkDeviceSize m_BytesMoved;
5478 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set when
// the allocation is actually relocated.
5480 struct AllocationInfo
5483 VkBool32* m_pChanged;
5486 m_hAllocation(VK_NULL_HANDLE),
5487 m_pChanged(VMA_NULL)
5492 struct AllocationInfoSizeGreater
5494 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5496 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5501 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state (struct header elided by extraction).
5505 VmaDeviceMemoryBlock* m_pBlock;
5506 bool m_HasNonMovableAllocations;
5507 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5509 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5511 m_HasNonMovableAllocations(true),
5512 m_Allocations(pAllocationCallbacks),
5513 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has immovable allocations when not every allocation in it was
// registered for defragmentation.
5517 void CalcHasNonMovableAllocations()
5519 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5520 const size_t defragmentAllocCount = m_Allocations.size();
5521 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic: "Descecnding" typo is preserved from the original source.)
5524 void SortAllocationsBySizeDescecnding()
5526 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5529 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5534 void* m_pMappedDataForDefragmentation;
// Heterogeneous address ordering: BlockInfo* vs raw block pointer.
5537 struct BlockPointerLess
5539 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5541 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5543 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5545 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Destination preference: movable-only blocks first, then most free space.
5551 struct BlockInfoCompareMoveDestination
5553 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5555 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5559 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5563 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5571 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5572 BlockInfoVector m_Blocks;
5574 VkResult DefragmentRound(
5575 VkDeviceSize maxBytesToMove,
5576 uint32_t maxAllocationsToMove);
5578 static bool MoveMakesSense(
5579 size_t dstBlockIndex, VkDeviceSize dstOffset,
5580 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Public interface (constructor signature elided by extraction).
5585 VmaBlockVector* pBlockVector,
5586 uint32_t currentFrameIndex);
5588 ~VmaDefragmentator();
5590 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5591 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5593 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5595 VkResult Defragment(
5596 VkDeviceSize maxBytesToMove,
5597 uint32_t maxAllocationsToMove);
// Interior of VmaRecorder (class header elided by extraction): when
// VMA_RECORDING_ENABLED, every public VMA call is appended to a trace file
// (guarded by m_FileMutex) for later offline replay. One Record* method per
// recorded API entry point; frameIndex tags each event.
5600 #if VMA_RECORDING_ENABLED 5607 void WriteConfiguration(
5608 const VkPhysicalDeviceProperties& devProps,
5609 const VkPhysicalDeviceMemoryProperties& memProps,
5610 bool dedicatedAllocationExtensionEnabled);
5613 void RecordCreateAllocator(uint32_t frameIndex);
5614 void RecordDestroyAllocator(uint32_t frameIndex);
5615 void RecordCreatePool(uint32_t frameIndex,
5618 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5619 void RecordAllocateMemory(uint32_t frameIndex,
5620 const VkMemoryRequirements& vkMemReq,
5623 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5624 const VkMemoryRequirements& vkMemReq,
5625 bool requiresDedicatedAllocation,
5626 bool prefersDedicatedAllocation,
5629 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5630 const VkMemoryRequirements& vkMemReq,
5631 bool requiresDedicatedAllocation,
5632 bool prefersDedicatedAllocation,
5635 void RecordFreeMemory(uint32_t frameIndex,
5637 void RecordResizeAllocation(
5638 uint32_t frameIndex,
5640 VkDeviceSize newSize);
5641 void RecordSetAllocationUserData(uint32_t frameIndex,
5643 const void* pUserData);
5644 void RecordCreateLostAllocation(uint32_t frameIndex,
5646 void RecordMapMemory(uint32_t frameIndex,
5648 void RecordUnmapMemory(uint32_t frameIndex,
5650 void RecordFlushAllocation(uint32_t frameIndex,
5651 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5652 void RecordInvalidateAllocation(uint32_t frameIndex,
5653 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5654 void RecordCreateBuffer(uint32_t frameIndex,
5655 const VkBufferCreateInfo& bufCreateInfo,
5658 void RecordCreateImage(uint32_t frameIndex,
5659 const VkImageCreateInfo& imageCreateInfo,
5662 void RecordDestroyBuffer(uint32_t frameIndex,
5664 void RecordDestroyImage(uint32_t frameIndex,
5666 void RecordTouchAllocation(uint32_t frameIndex,
5668 void RecordGetAllocationInfo(uint32_t frameIndex,
5670 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
// Helper that formats a user-data pointer/string for the trace.
5680 class UserDataString
5684 const char* GetString()
const {
return m_Str; }
// Serializes writes to the trace file; timestamps are relative to
// m_StartCounter.
5694 VMA_MUTEX m_FileMutex;
5696 int64_t m_StartCounter;
5698 void GetBasicParams(CallParams& outParams);
// The allocator object behind the VmaAllocator handle. Owns one
// VmaBlockVector per memory type (default pools), tracks dedicated
// allocations per memory type, caches device properties, and routes all
// Vulkan calls through m_VulkanFunctions. Many lines (ctor/dtor, some
// members) were elided by extraction.
5702 #endif // #if VMA_RECORDING_ENABLED 5705 struct VmaAllocator_T
5707 VMA_CLASS_NO_COPY(VmaAllocator_T)
5710 bool m_UseKhrDedicatedAllocation;
5712 bool m_AllocationCallbacksSpecified;
5713 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap budget caps, guarded by m_HeapSizeLimitMutex.
5717 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5718 VMA_MUTEX m_HeapSizeLimitMutex;
5720 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5721 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default pools: one block vector per memory type.
5724 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations, tracked per memory type under their own mutexes.
5727 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5728 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5729 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5735 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5737 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5741 return m_VulkanFunctions;
// Effective granularity: device limit, floored by the debug minimum.
5744 VkDeviceSize GetBufferImageGranularity()
const 5747 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5748 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5751 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5752 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5754 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5756 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5757 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Host-visible but NOT host-coherent: such memory needs explicit
// flush/invalidate at nonCoherentAtomSize alignment (see below).
5760 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5762 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5763 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5766 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5768 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5769 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5770 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5773 bool IsIntegratedGpu()
const 5775 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5778 #if VMA_RECORDING_ENABLED 5779 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Requirement queries, using KHR_get_memory_requirements2 when available.
5782 void GetBufferMemoryRequirements(
5784 VkMemoryRequirements& memReq,
5785 bool& requiresDedicatedAllocation,
5786 bool& prefersDedicatedAllocation)
const;
5787 void GetImageMemoryRequirements(
5789 VkMemoryRequirements& memReq,
5790 bool& requiresDedicatedAllocation,
5791 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point; dispatches to pool or dedicated paths.
5794 VkResult AllocateMemory(
5795 const VkMemoryRequirements& vkMemReq,
5796 bool requiresDedicatedAllocation,
5797 bool prefersDedicatedAllocation,
5798 VkBuffer dedicatedBuffer,
5799 VkImage dedicatedImage,
5801 VmaSuballocationType suballocType,
5807 VkResult ResizeAllocation(
5809 VkDeviceSize newSize);
5811 void CalculateStats(
VmaStats* pStats);
5813 #if VMA_STATS_STRING_ENABLED 5814 void PrintDetailedMap(
class VmaJsonWriter& json);
5817 VkResult Defragment(
5819 size_t allocationCount,
5820 VkBool32* pAllocationsChanged,
5828 void DestroyPool(
VmaPool pool);
5831 void SetCurrentFrameIndex(uint32_t frameIndex);
5832 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5834 void MakePoolAllocationsLost(
5836 size_t* pLostAllocationCount);
5837 VkResult CheckPoolCorruption(
VmaPool hPool);
5838 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Raw vkAllocateMemory/vkFreeMemory wrappers (budget/limit aware).
5842 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5843 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5848 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5849 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5851 void FlushOrInvalidateAllocation(
5853 VkDeviceSize offset, VkDeviceSize size,
5854 VMA_CACHE_OPERATION op);
// Debug helper: fills allocation memory with a byte pattern.
5856 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5859 VkDeviceSize m_PreferredLargeHeapBlockSize;
5861 VkPhysicalDevice m_PhysicalDevice;
5862 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
5864 VMA_MUTEX m_PoolsMutex;
5866 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5867 uint32_t m_NextPoolId;
5871 #if VMA_RECORDING_ENABLED 5872 VmaRecorder* m_pRecorder;
5877 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation within one specific memory type (params partially elided).
5879 VkResult AllocateMemoryOfType(
5881 VkDeviceSize alignment,
5882 bool dedicatedAllocation,
5883 VkBuffer dedicatedBuffer,
5884 VkImage dedicatedImage,
5886 uint32_t memTypeIndex,
5887 VmaSuballocationType suballocType,
5891 VkResult AllocateDedicatedMemory(
5893 VmaSuballocationType suballocType,
5894 uint32_t memTypeIndex,
5896 bool isUserDataString,
5898 VkBuffer dedicatedBuffer,
5899 VkImage dedicatedImage,
5909 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5911 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5914 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5916 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// NOTE(review): the function signature (original lines 5920-5921) was lost in
// extraction. The surviving body allocates one T-sized, T-aligned object via
// the allocator path — presumably the single-object counterpart of
// VmaAllocateArray below; confirm the exact name against the full file.
5919 template<
typename T>
5922 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5925 template<
typename T>
5926 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5928 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5931 template<
typename T>
5932 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5937 VmaFree(hAllocator, ptr);
5941 template<
typename T>
5942 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5946 for(
size_t i = count; i--; )
5948 VmaFree(hAllocator, ptr);
5955 #if VMA_STATS_STRING_ENABLED 5957 class VmaStringBuilder
5960 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5961 size_t GetLength()
const {
return m_Data.size(); }
5962 const char* GetData()
const {
return m_Data.data(); }
5964 void Add(
char ch) { m_Data.push_back(ch); }
5965 void Add(
const char* pStr);
5966 void AddNewLine() { Add(
'\n'); }
5967 void AddNumber(uint32_t num);
5968 void AddNumber(uint64_t num);
5969 void AddPointer(
const void* ptr);
5972 VmaVector< char, VmaStlAllocator<char> > m_Data;
5975 void VmaStringBuilder::Add(
const char* pStr)
5977 const size_t strLen = strlen(pStr);
5980 const size_t oldCount = m_Data.size();
5981 m_Data.resize(oldCount + strLen);
5982 memcpy(m_Data.data() + oldCount, pStr, strLen);
5986 void VmaStringBuilder::AddNumber(uint32_t num)
5989 VmaUint32ToStr(buf,
sizeof(buf), num);
5993 void VmaStringBuilder::AddNumber(uint64_t num)
5996 VmaUint64ToStr(buf,
sizeof(buf), num);
6000 void VmaStringBuilder::AddPointer(
const void* ptr)
6003 VmaPtrToStr(buf,
sizeof(buf), ptr);
6007 #endif // #if VMA_STATS_STRING_ENABLED 6012 #if VMA_STATS_STRING_ENABLED 6016 VMA_CLASS_NO_COPY(VmaJsonWriter)
6018 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6021 void BeginObject(
bool singleLine =
false);
6024 void BeginArray(
bool singleLine =
false);
6027 void WriteString(
const char* pStr);
6028 void BeginString(
const char* pStr = VMA_NULL);
6029 void ContinueString(
const char* pStr);
6030 void ContinueString(uint32_t n);
6031 void ContinueString(uint64_t n);
6032 void ContinueString_Pointer(
const void* ptr);
6033 void EndString(
const char* pStr = VMA_NULL);
6035 void WriteNumber(uint32_t n);
6036 void WriteNumber(uint64_t n);
6037 void WriteBool(
bool b);
6041 static const char*
const INDENT;
6043 enum COLLECTION_TYPE
6045 COLLECTION_TYPE_OBJECT,
6046 COLLECTION_TYPE_ARRAY,
6050 COLLECTION_TYPE type;
6051 uint32_t valueCount;
6052 bool singleLineMode;
6055 VmaStringBuilder& m_SB;
6056 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6057 bool m_InsideString;
6059 void BeginValue(
bool isString);
6060 void WriteIndent(
bool oneLess =
false);
6063 const char*
const VmaJsonWriter::INDENT =
" ";
6065 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6067 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6068 m_InsideString(false)
6072 VmaJsonWriter::~VmaJsonWriter()
6074 VMA_ASSERT(!m_InsideString);
6075 VMA_ASSERT(m_Stack.empty());
6078 void VmaJsonWriter::BeginObject(
bool singleLine)
6080 VMA_ASSERT(!m_InsideString);
6086 item.type = COLLECTION_TYPE_OBJECT;
6087 item.valueCount = 0;
6088 item.singleLineMode = singleLine;
6089 m_Stack.push_back(item);
6092 void VmaJsonWriter::EndObject()
6094 VMA_ASSERT(!m_InsideString);
6099 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
6103 void VmaJsonWriter::BeginArray(
bool singleLine)
6105 VMA_ASSERT(!m_InsideString);
6111 item.type = COLLECTION_TYPE_ARRAY;
6112 item.valueCount = 0;
6113 item.singleLineMode = singleLine;
6114 m_Stack.push_back(item);
6117 void VmaJsonWriter::EndArray()
6119 VMA_ASSERT(!m_InsideString);
6124 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
6128 void VmaJsonWriter::WriteString(
const char* pStr)
6134 void VmaJsonWriter::BeginString(
const char* pStr)
6136 VMA_ASSERT(!m_InsideString);
6140 m_InsideString =
true;
6141 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6143 ContinueString(pStr);
6147 void VmaJsonWriter::ContinueString(
const char* pStr)
6149 VMA_ASSERT(m_InsideString);
6151 const size_t strLen = strlen(pStr);
6152 for(
size_t i = 0; i < strLen; ++i)
6185 VMA_ASSERT(0 &&
"Character not currently supported.");
6191 void VmaJsonWriter::ContinueString(uint32_t n)
6193 VMA_ASSERT(m_InsideString);
6197 void VmaJsonWriter::ContinueString(uint64_t n)
6199 VMA_ASSERT(m_InsideString);
6203 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6205 VMA_ASSERT(m_InsideString);
6206 m_SB.AddPointer(ptr);
6209 void VmaJsonWriter::EndString(
const char* pStr)
6211 VMA_ASSERT(m_InsideString);
6212 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6214 ContinueString(pStr);
6217 m_InsideString =
false;
6220 void VmaJsonWriter::WriteNumber(uint32_t n)
6222 VMA_ASSERT(!m_InsideString);
6227 void VmaJsonWriter::WriteNumber(uint64_t n)
6229 VMA_ASSERT(!m_InsideString);
6234 void VmaJsonWriter::WriteBool(
bool b)
6236 VMA_ASSERT(!m_InsideString);
6238 m_SB.Add(b ?
"true" :
"false");
6241 void VmaJsonWriter::WriteNull()
6243 VMA_ASSERT(!m_InsideString);
6248 void VmaJsonWriter::BeginValue(
bool isString)
6250 if(!m_Stack.empty())
6252 StackItem& currItem = m_Stack.back();
6253 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6254 currItem.valueCount % 2 == 0)
6256 VMA_ASSERT(isString);
6259 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6260 currItem.valueCount % 2 != 0)
6264 else if(currItem.valueCount > 0)
6273 ++currItem.valueCount;
6277 void VmaJsonWriter::WriteIndent(
bool oneLess)
6279 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6283 size_t count = m_Stack.size();
6284 if(count > 0 && oneLess)
6288 for(
size_t i = 0; i < count; ++i)
6295 #endif // #if VMA_STATS_STRING_ENABLED 6299 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6301 if(IsUserDataString())
6303 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6305 FreeUserDataString(hAllocator);
6307 if(pUserData != VMA_NULL)
6309 const char*
const newStrSrc = (
char*)pUserData;
6310 const size_t newStrLen = strlen(newStrSrc);
6311 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6312 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6313 m_pUserData = newStrDst;
6318 m_pUserData = pUserData;
6322 void VmaAllocation_T::ChangeBlockAllocation(
6324 VmaDeviceMemoryBlock* block,
6325 VkDeviceSize offset)
6327 VMA_ASSERT(block != VMA_NULL);
6328 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6331 if(block != m_BlockAllocation.m_Block)
6333 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6334 if(IsPersistentMap())
6336 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6337 block->Map(hAllocator, mapRefCount, VMA_NULL);
6340 m_BlockAllocation.m_Block = block;
6341 m_BlockAllocation.m_Offset = offset;
6344 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
6346 VMA_ASSERT(newSize > 0);
6350 VkDeviceSize VmaAllocation_T::GetOffset()
const 6354 case ALLOCATION_TYPE_BLOCK:
6355 return m_BlockAllocation.m_Offset;
6356 case ALLOCATION_TYPE_DEDICATED:
6364 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6368 case ALLOCATION_TYPE_BLOCK:
6369 return m_BlockAllocation.m_Block->GetDeviceMemory();
6370 case ALLOCATION_TYPE_DEDICATED:
6371 return m_DedicatedAllocation.m_hMemory;
6374 return VK_NULL_HANDLE;
6378 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6382 case ALLOCATION_TYPE_BLOCK:
6383 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6384 case ALLOCATION_TYPE_DEDICATED:
6385 return m_DedicatedAllocation.m_MemoryTypeIndex;
6392 void* VmaAllocation_T::GetMappedData()
const 6396 case ALLOCATION_TYPE_BLOCK:
6399 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6400 VMA_ASSERT(pBlockData != VMA_NULL);
6401 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6408 case ALLOCATION_TYPE_DEDICATED:
6409 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6410 return m_DedicatedAllocation.m_pMappedData;
6417 bool VmaAllocation_T::CanBecomeLost()
const 6421 case ALLOCATION_TYPE_BLOCK:
6422 return m_BlockAllocation.m_CanBecomeLost;
6423 case ALLOCATION_TYPE_DEDICATED:
6431 VmaPool VmaAllocation_T::GetPool()
const 6433 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6434 return m_BlockAllocation.m_hPool;
6437 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6439 VMA_ASSERT(CanBecomeLost());
6445 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6448 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6453 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6459 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
6469 #if VMA_STATS_STRING_ENABLED 6472 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
6481 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6483 json.WriteString(
"Type");
6484 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6486 json.WriteString(
"Size");
6487 json.WriteNumber(m_Size);
6489 if(m_pUserData != VMA_NULL)
6491 json.WriteString(
"UserData");
6492 if(IsUserDataString())
6494 json.WriteString((
const char*)m_pUserData);
6499 json.ContinueString_Pointer(m_pUserData);
6504 json.WriteString(
"CreationFrameIndex");
6505 json.WriteNumber(m_CreationFrameIndex);
6507 json.WriteString(
"LastUseFrameIndex");
6508 json.WriteNumber(GetLastUseFrameIndex());
6510 if(m_BufferImageUsage != 0)
6512 json.WriteString(
"Usage");
6513 json.WriteNumber(m_BufferImageUsage);
6519 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6521 VMA_ASSERT(IsUserDataString());
6522 if(m_pUserData != VMA_NULL)
6524 char*
const oldStr = (
char*)m_pUserData;
6525 const size_t oldStrLen = strlen(oldStr);
6526 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6527 m_pUserData = VMA_NULL;
6531 void VmaAllocation_T::BlockAllocMap()
6533 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6535 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6541 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
6545 void VmaAllocation_T::BlockAllocUnmap()
6547 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6549 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6555 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
6559 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6561 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6565 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6567 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6568 *ppData = m_DedicatedAllocation.m_pMappedData;
6574 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6575 return VK_ERROR_MEMORY_MAP_FAILED;
6580 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6581 hAllocator->m_hDevice,
6582 m_DedicatedAllocation.m_hMemory,
6587 if(result == VK_SUCCESS)
6589 m_DedicatedAllocation.m_pMappedData = *ppData;
6596 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6598 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6600 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6605 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6606 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6607 hAllocator->m_hDevice,
6608 m_DedicatedAllocation.m_hMemory);
6613 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
6617 #if VMA_STATS_STRING_ENABLED 6619 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6623 json.WriteString(
"Blocks");
6626 json.WriteString(
"Allocations");
6629 json.WriteString(
"UnusedRanges");
6632 json.WriteString(
"UsedBytes");
6635 json.WriteString(
"UnusedBytes");
6640 json.WriteString(
"AllocationSize");
6641 json.BeginObject(
true);
6642 json.WriteString(
"Min");
6644 json.WriteString(
"Avg");
6646 json.WriteString(
"Max");
6653 json.WriteString(
"UnusedRangeSize");
6654 json.BeginObject(
true);
6655 json.WriteString(
"Min");
6657 json.WriteString(
"Avg");
6659 json.WriteString(
"Max");
6667 #endif // #if VMA_STATS_STRING_ENABLED 6669 struct VmaSuballocationItemSizeLess
6672 const VmaSuballocationList::iterator lhs,
6673 const VmaSuballocationList::iterator rhs)
const 6675 return lhs->size < rhs->size;
6678 const VmaSuballocationList::iterator lhs,
6679 VkDeviceSize rhsSize)
const 6681 return lhs->size < rhsSize;
6689 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6691 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
6695 #if VMA_STATS_STRING_ENABLED 6697 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6698 VkDeviceSize unusedBytes,
6699 size_t allocationCount,
6700 size_t unusedRangeCount)
const 6704 json.WriteString(
"TotalBytes");
6705 json.WriteNumber(GetSize());
6707 json.WriteString(
"UnusedBytes");
6708 json.WriteNumber(unusedBytes);
6710 json.WriteString(
"Allocations");
6711 json.WriteNumber((uint64_t)allocationCount);
6713 json.WriteString(
"UnusedRanges");
6714 json.WriteNumber((uint64_t)unusedRangeCount);
6716 json.WriteString(
"Suballocations");
6720 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6721 VkDeviceSize offset,
6724 json.BeginObject(
true);
6726 json.WriteString(
"Offset");
6727 json.WriteNumber(offset);
6729 hAllocation->PrintParameters(json);
6734 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6735 VkDeviceSize offset,
6736 VkDeviceSize size)
const 6738 json.BeginObject(
true);
6740 json.WriteString(
"Offset");
6741 json.WriteNumber(offset);
6743 json.WriteString(
"Type");
6744 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6746 json.WriteString(
"Size");
6747 json.WriteNumber(size);
6752 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6758 #endif // #if VMA_STATS_STRING_ENABLED 6763 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6764 VmaBlockMetadata(hAllocator),
6767 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6768 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6772 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
6776 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6778 VmaBlockMetadata::Init(size);
6781 m_SumFreeSize = size;
6783 VmaSuballocation suballoc = {};
6784 suballoc.offset = 0;
6785 suballoc.size = size;
6786 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6787 suballoc.hAllocation = VK_NULL_HANDLE;
6789 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6790 m_Suballocations.push_back(suballoc);
6791 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6793 m_FreeSuballocationsBySize.push_back(suballocItem);
6796 bool VmaBlockMetadata_Generic::Validate()
const 6798 VMA_VALIDATE(!m_Suballocations.empty());
6801 VkDeviceSize calculatedOffset = 0;
6803 uint32_t calculatedFreeCount = 0;
6805 VkDeviceSize calculatedSumFreeSize = 0;
6808 size_t freeSuballocationsToRegister = 0;
6810 bool prevFree =
false;
6812 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6813 suballocItem != m_Suballocations.cend();
6816 const VmaSuballocation& subAlloc = *suballocItem;
6819 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6821 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
6823 VMA_VALIDATE(!prevFree || !currFree);
6825 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6829 calculatedSumFreeSize += subAlloc.size;
6830 ++calculatedFreeCount;
6831 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6833 ++freeSuballocationsToRegister;
6837 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6841 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6842 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
6845 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6848 calculatedOffset += subAlloc.size;
6849 prevFree = currFree;
6854 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6856 VkDeviceSize lastSize = 0;
6857 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6859 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6862 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6864 VMA_VALIDATE(suballocItem->size >= lastSize);
6866 lastSize = suballocItem->size;
6870 VMA_VALIDATE(ValidateFreeSuballocationList());
6871 VMA_VALIDATE(calculatedOffset == GetSize());
6872 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6873 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
6878 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6880 if(!m_FreeSuballocationsBySize.empty())
6882 return m_FreeSuballocationsBySize.back()->size;
6890 bool VmaBlockMetadata_Generic::IsEmpty()
const 6892 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
6895 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6899 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6911 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6912 suballocItem != m_Suballocations.cend();
6915 const VmaSuballocation& suballoc = *suballocItem;
6916 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
6929 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6931 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6933 inoutStats.
size += GetSize();
6940 #if VMA_STATS_STRING_ENABLED 6942 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6944 PrintDetailedMap_Begin(json,
6946 m_Suballocations.size() - (size_t)m_FreeCount,
6950 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6951 suballocItem != m_Suballocations.cend();
6952 ++suballocItem, ++i)
6954 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6956 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6960 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6964 PrintDetailedMap_End(json);
6967 #endif // #if VMA_STATS_STRING_ENABLED 6969 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6970 uint32_t currentFrameIndex,
6971 uint32_t frameInUseCount,
6972 VkDeviceSize bufferImageGranularity,
6973 VkDeviceSize allocSize,
6974 VkDeviceSize allocAlignment,
6976 VmaSuballocationType allocType,
6977 bool canMakeOtherLost,
6979 VmaAllocationRequest* pAllocationRequest)
6981 VMA_ASSERT(allocSize > 0);
6982 VMA_ASSERT(!upperAddress);
6983 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6984 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6985 VMA_HEAVY_ASSERT(Validate());
6988 if(canMakeOtherLost ==
false &&
6989 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6995 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6996 if(freeSuballocCount > 0)
7001 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7002 m_FreeSuballocationsBySize.data(),
7003 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7004 allocSize + 2 * VMA_DEBUG_MARGIN,
7005 VmaSuballocationItemSizeLess());
7006 size_t index = it - m_FreeSuballocationsBySize.data();
7007 for(; index < freeSuballocCount; ++index)
7012 bufferImageGranularity,
7016 m_FreeSuballocationsBySize[index],
7018 &pAllocationRequest->offset,
7019 &pAllocationRequest->itemsToMakeLostCount,
7020 &pAllocationRequest->sumFreeSize,
7021 &pAllocationRequest->sumItemSize))
7023 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7031 for(
size_t index = freeSuballocCount; index--; )
7036 bufferImageGranularity,
7040 m_FreeSuballocationsBySize[index],
7042 &pAllocationRequest->offset,
7043 &pAllocationRequest->itemsToMakeLostCount,
7044 &pAllocationRequest->sumFreeSize,
7045 &pAllocationRequest->sumItemSize))
7047 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7054 if(canMakeOtherLost)
7058 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7059 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7061 VmaAllocationRequest tmpAllocRequest = {};
7062 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7063 suballocIt != m_Suballocations.end();
7066 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7067 suballocIt->hAllocation->CanBecomeLost())
7072 bufferImageGranularity,
7078 &tmpAllocRequest.offset,
7079 &tmpAllocRequest.itemsToMakeLostCount,
7080 &tmpAllocRequest.sumFreeSize,
7081 &tmpAllocRequest.sumItemSize))
7083 tmpAllocRequest.item = suballocIt;
7085 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7088 *pAllocationRequest = tmpAllocRequest;
7094 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
7103 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7104 uint32_t currentFrameIndex,
7105 uint32_t frameInUseCount,
7106 VmaAllocationRequest* pAllocationRequest)
7108 while(pAllocationRequest->itemsToMakeLostCount > 0)
7110 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7112 ++pAllocationRequest->item;
7114 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7115 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7116 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7117 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7119 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7120 --pAllocationRequest->itemsToMakeLostCount;
7128 VMA_HEAVY_ASSERT(Validate());
7129 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7130 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
7135 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7137 uint32_t lostAllocationCount = 0;
7138 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7139 it != m_Suballocations.end();
7142 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7143 it->hAllocation->CanBecomeLost() &&
7144 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7146 it = FreeSuballocation(it);
7147 ++lostAllocationCount;
7150 return lostAllocationCount;
7153 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7155 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7156 it != m_Suballocations.end();
7159 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7161 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7163 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7164 return VK_ERROR_VALIDATION_FAILED_EXT;
7166 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7168 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7169 return VK_ERROR_VALIDATION_FAILED_EXT;
7177 void VmaBlockMetadata_Generic::Alloc(
7178 const VmaAllocationRequest& request,
7179 VmaSuballocationType type,
7180 VkDeviceSize allocSize,
7184 VMA_ASSERT(!upperAddress);
7185 VMA_ASSERT(request.item != m_Suballocations.end());
7186 VmaSuballocation& suballoc = *request.item;
7188 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7190 VMA_ASSERT(request.offset >= suballoc.offset);
7191 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7192 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7193 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7197 UnregisterFreeSuballocation(request.item);
7199 suballoc.offset = request.offset;
7200 suballoc.size = allocSize;
7201 suballoc.type = type;
7202 suballoc.hAllocation = hAllocation;
7207 VmaSuballocation paddingSuballoc = {};
7208 paddingSuballoc.offset = request.offset + allocSize;
7209 paddingSuballoc.size = paddingEnd;
7210 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7211 VmaSuballocationList::iterator next = request.item;
7213 const VmaSuballocationList::iterator paddingEndItem =
7214 m_Suballocations.insert(next, paddingSuballoc);
7215 RegisterFreeSuballocation(paddingEndItem);
7221 VmaSuballocation paddingSuballoc = {};
7222 paddingSuballoc.offset = request.offset - paddingBegin;
7223 paddingSuballoc.size = paddingBegin;
7224 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7225 const VmaSuballocationList::iterator paddingBeginItem =
7226 m_Suballocations.insert(request.item, paddingSuballoc);
7227 RegisterFreeSuballocation(paddingBeginItem);
7231 m_FreeCount = m_FreeCount - 1;
7232 if(paddingBegin > 0)
7240 m_SumFreeSize -= allocSize;
7243 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7245 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7246 suballocItem != m_Suballocations.end();
7249 VmaSuballocation& suballoc = *suballocItem;
7250 if(suballoc.hAllocation == allocation)
7252 FreeSuballocation(suballocItem);
7253 VMA_HEAVY_ASSERT(Validate());
7257 VMA_ASSERT(0 &&
"Not found!");
7260 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7262 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7263 suballocItem != m_Suballocations.end();
7266 VmaSuballocation& suballoc = *suballocItem;
7267 if(suballoc.offset == offset)
7269 FreeSuballocation(suballocItem);
7273 VMA_ASSERT(0 &&
"Not found!");
7276 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
7278 typedef VmaSuballocationList::iterator iter_type;
7279 for(iter_type suballocItem = m_Suballocations.begin();
7280 suballocItem != m_Suballocations.end();
7283 VmaSuballocation& suballoc = *suballocItem;
7284 if(suballoc.hAllocation == alloc)
7286 iter_type nextItem = suballocItem;
7290 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
7293 if(newSize < alloc->GetSize())
7295 const VkDeviceSize sizeDiff = suballoc.size - newSize;
7298 if(nextItem != m_Suballocations.end())
7301 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7304 UnregisterFreeSuballocation(nextItem);
7305 nextItem->offset -= sizeDiff;
7306 nextItem->size += sizeDiff;
7307 RegisterFreeSuballocation(nextItem);
7313 VmaSuballocation newFreeSuballoc;
7314 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7315 newFreeSuballoc.offset = suballoc.offset + newSize;
7316 newFreeSuballoc.size = sizeDiff;
7317 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7318 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
7319 RegisterFreeSuballocation(newFreeSuballocIt);
7328 VmaSuballocation newFreeSuballoc;
7329 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
7330 newFreeSuballoc.offset = suballoc.offset + newSize;
7331 newFreeSuballoc.size = sizeDiff;
7332 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7333 m_Suballocations.push_back(newFreeSuballoc);
7335 iter_type newFreeSuballocIt = m_Suballocations.end();
7336 RegisterFreeSuballocation(--newFreeSuballocIt);
7341 suballoc.size = newSize;
7342 m_SumFreeSize += sizeDiff;
7347 const VkDeviceSize sizeDiff = newSize - suballoc.size;
7350 if(nextItem != m_Suballocations.end())
7353 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7356 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
7362 if(nextItem->size > sizeDiff)
7365 UnregisterFreeSuballocation(nextItem);
7366 nextItem->offset += sizeDiff;
7367 nextItem->size -= sizeDiff;
7368 RegisterFreeSuballocation(nextItem);
7374 UnregisterFreeSuballocation(nextItem);
7375 m_Suballocations.erase(nextItem);
7391 suballoc.size = newSize;
7392 m_SumFreeSize -= sizeDiff;
7399 VMA_ASSERT(0 &&
"Not found!");
7403 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7405 VkDeviceSize lastSize = 0;
7406 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7408 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7410 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7411 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7412 VMA_VALIDATE(it->size >= lastSize);
7413 lastSize = it->size;
7418 bool VmaBlockMetadata_Generic::CheckAllocation(
7419 uint32_t currentFrameIndex,
7420 uint32_t frameInUseCount,
7421 VkDeviceSize bufferImageGranularity,
7422 VkDeviceSize allocSize,
7423 VkDeviceSize allocAlignment,
7424 VmaSuballocationType allocType,
7425 VmaSuballocationList::const_iterator suballocItem,
7426 bool canMakeOtherLost,
7427 VkDeviceSize* pOffset,
7428 size_t* itemsToMakeLostCount,
7429 VkDeviceSize* pSumFreeSize,
7430 VkDeviceSize* pSumItemSize)
const 7432 VMA_ASSERT(allocSize > 0);
7433 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7434 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7435 VMA_ASSERT(pOffset != VMA_NULL);
7437 *itemsToMakeLostCount = 0;
7441 if(canMakeOtherLost)
7443 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7445 *pSumFreeSize = suballocItem->size;
7449 if(suballocItem->hAllocation->CanBecomeLost() &&
7450 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7452 ++*itemsToMakeLostCount;
7453 *pSumItemSize = suballocItem->size;
7462 if(GetSize() - suballocItem->offset < allocSize)
7468 *pOffset = suballocItem->offset;
7471 if(VMA_DEBUG_MARGIN > 0)
7473 *pOffset += VMA_DEBUG_MARGIN;
7477 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7481 if(bufferImageGranularity > 1)
7483 bool bufferImageGranularityConflict =
false;
7484 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7485 while(prevSuballocItem != m_Suballocations.cbegin())
7488 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7489 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7491 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7493 bufferImageGranularityConflict =
true;
7501 if(bufferImageGranularityConflict)
7503 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7509 if(*pOffset >= suballocItem->offset + suballocItem->size)
7515 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7518 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7520 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7522 if(suballocItem->offset + totalSize > GetSize())
7529 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7530 if(totalSize > suballocItem->size)
7532 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7533 while(remainingSize > 0)
7536 if(lastSuballocItem == m_Suballocations.cend())
7540 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7542 *pSumFreeSize += lastSuballocItem->size;
7546 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7547 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7548 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7550 ++*itemsToMakeLostCount;
7551 *pSumItemSize += lastSuballocItem->size;
7558 remainingSize = (lastSuballocItem->size < remainingSize) ?
7559 remainingSize - lastSuballocItem->size : 0;
7565 if(bufferImageGranularity > 1)
7567 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7569 while(nextSuballocItem != m_Suballocations.cend())
7571 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7572 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7574 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7576 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7577 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7578 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7580 ++*itemsToMakeLostCount;
7599 const VmaSuballocation& suballoc = *suballocItem;
7600 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7602 *pSumFreeSize = suballoc.size;
7605 if(suballoc.size < allocSize)
7611 *pOffset = suballoc.offset;
7614 if(VMA_DEBUG_MARGIN > 0)
7616 *pOffset += VMA_DEBUG_MARGIN;
7620 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7624 if(bufferImageGranularity > 1)
7626 bool bufferImageGranularityConflict =
false;
7627 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7628 while(prevSuballocItem != m_Suballocations.cbegin())
7631 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7632 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7634 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7636 bufferImageGranularityConflict =
true;
7644 if(bufferImageGranularityConflict)
7646 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7651 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7654 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7657 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
7664 if(bufferImageGranularity > 1)
7666 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7668 while(nextSuballocItem != m_Suballocations.cend())
7670 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7671 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7673 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7692 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7694 VMA_ASSERT(item != m_Suballocations.end());
7695 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7697 VmaSuballocationList::iterator nextItem = item;
7699 VMA_ASSERT(nextItem != m_Suballocations.end());
7700 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7702 item->size += nextItem->size;
7704 m_Suballocations.erase(nextItem);
7707 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7710 VmaSuballocation& suballoc = *suballocItem;
7711 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7712 suballoc.hAllocation = VK_NULL_HANDLE;
7716 m_SumFreeSize += suballoc.size;
7719 bool mergeWithNext =
false;
7720 bool mergeWithPrev =
false;
7722 VmaSuballocationList::iterator nextItem = suballocItem;
7724 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7726 mergeWithNext =
true;
7729 VmaSuballocationList::iterator prevItem = suballocItem;
7730 if(suballocItem != m_Suballocations.begin())
7733 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7735 mergeWithPrev =
true;
7741 UnregisterFreeSuballocation(nextItem);
7742 MergeFreeWithNext(suballocItem);
7747 UnregisterFreeSuballocation(prevItem);
7748 MergeFreeWithNext(prevItem);
7749 RegisterFreeSuballocation(prevItem);
7754 RegisterFreeSuballocation(suballocItem);
7755 return suballocItem;
7759 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7761 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7762 VMA_ASSERT(item->size > 0);
7766 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7768 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7770 if(m_FreeSuballocationsBySize.empty())
7772 m_FreeSuballocationsBySize.push_back(item);
7776 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the size-sorted registry. Binary-searches
// for the first entry of at least this size, then scans forward because
// several registered items may share the same size; asserts if not found.
7784 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7786 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7787 VMA_ASSERT(item->size > 0);
7791 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only items at or above the registration threshold were ever registered.
7793 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7795 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7796 m_FreeSuballocationsBySize.data(),
7797 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7799 VmaSuballocationItemSizeLess());
// Linear scan over the run of equal-sized entries to find this exact item.
7800 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7801 index < m_FreeSuballocationsBySize.size();
7804 if(m_FreeSuballocationsBySize[index] == item)
7806 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walking past the equal-size run without a match means the registry is
// inconsistent — both asserts below fire "Not found.".
7809 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7811 VMA_ASSERT(0 &&
"Not found.");
// Linear (stack / ring-buffer) metadata: suballocations live in two vectors
// whose "1st"/"2nd" roles are selected by m_1stVectorIndex. Starts with the
// second vector unused (SECOND_VECTOR_EMPTY) and no lazily-freed null items.
7820 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7821 VmaBlockMetadata(hAllocator),
// Both vectors allocate through the owner allocator's callbacks.
7823 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7824 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7825 m_1stVectorIndex(0),
7826 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
// Counters of freed-but-not-yet-compacted ("null") entries in each vector.
7827 m_1stNullItemsBeginCount(0),
7828 m_1stNullItemsMiddleCount(0),
7829 m_2ndNullItemsCount(0)
// Trivial destructor — the suballocation vectors clean up via their allocator.
7833 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size; with no suballocations
// yet, the entire block is free.
7837 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7839 VmaBlockMetadata::Init(size);
7840 m_SumFreeSize = size;
// Debug self-check: verifies structural invariants of the two suballocation
// vectors (ordering by offset, null-item counters, per-allocation
// offset/size agreement) and that m_SumFreeSize matches size minus used bytes.
// Uses VMA_VALIDATE, which reports failure rather than aborting.
7843 bool VmaBlockMetadata_Linear::Validate()
const 7845 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7846 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a second-vector mode is active.
7848 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7849 VMA_VALIDATE(!suballocations1st.empty() ||
7850 suballocations2nd.empty() ||
7851 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7853 if(!suballocations1st.empty())
// First non-null and last items of the 1st vector must hold live allocations.
7856 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7858 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7860 if(!suballocations2nd.empty())
7863 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed the vectors they describe.
7866 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7867 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7869 VkDeviceSize sumUsedSize = 0;
7870 const size_t suballoc1stCount = suballocations1st.size();
7871 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: the 2nd vector precedes the 1st in address order, so
// walk it first, checking monotonically increasing offsets.
7873 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7875 const size_t suballoc2ndCount = suballocations2nd.size();
7876 size_t nullItem2ndCount = 0;
7877 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7879 const VmaSuballocation& suballoc = suballocations2nd[i];
7880 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// FREE type and null handle must agree for every item.
7882 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7883 VMA_VALIDATE(suballoc.offset >= offset);
7887 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7888 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7889 sumUsedSize += suballoc.size;
7896 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7899 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be free placeholders.
7902 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7904 const VmaSuballocation& suballoc = suballocations1st[i];
7905 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7906 suballoc.hAllocation == VK_NULL_HANDLE);
7909 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the live region of the 1st vector with the same checks.
7911 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7913 const VmaSuballocation& suballoc = suballocations1st[i];
7914 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7916 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7917 VMA_VALIDATE(suballoc.offset >= offset);
7918 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7922 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7923 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7924 sumUsedSize += suballoc.size;
7931 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7933 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: the 2nd vector sits at the top of the block and is
// stored top-down, so iterate it in reverse to keep offsets increasing.
7935 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7937 const size_t suballoc2ndCount = suballocations2nd.size();
7938 size_t nullItem2ndCount = 0;
7939 for(
size_t i = suballoc2ndCount; i--; )
7941 const VmaSuballocation& suballoc = suballocations2nd[i];
7942 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7944 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7945 VMA_VALIDATE(suballoc.offset >= offset);
7949 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7950 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7951 sumUsedSize += suballoc.size;
7958 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7961 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Final totals: nothing may extend past the block, and the cached free-size
// counter must equal what this walk actually measured.
7964 VMA_VALIDATE(offset <= GetSize());
7965 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total entries in both vectors minus the
// freed-but-not-compacted ("null") items tracked by the counters.
7970 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7972 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7973 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, computed per
// second-vector mode from the gap(s) around the used regions.
// NOTE(review): some lines (e.g. the empty-block early return) are elided in
// this chunk; comments describe only the visible cases.
7976 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7978 const VkDeviceSize size = GetSize();
7990 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7992 switch(m_2ndVectorMode)
// Only the 1st vector in use: free space is before the first and after the
// last suballocation — return the larger of the two gaps.
7994 case SECOND_VECTOR_EMPTY:
8000 const size_t suballocations1stCount = suballocations1st.size();
8001 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8002 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8003 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8005 firstSuballoc.offset,
8006 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the free range lies between the end of the 2nd vector's last
// item and the start of the 1st vector's first live item.
8010 case SECOND_VECTOR_RING_BUFFER:
8015 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8016 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8017 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8018 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the free range is between the top of the bottom-up 1st stack
// and the bottom of the top-down 2nd stack.
8022 case SECOND_VECTOR_DOUBLE_STACK:
8027 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8028 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8029 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8030 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills a VmaStatInfo by sweeping the block in address order: ring-buffer 2nd
// vector first (if active), then the 1st vector, then the double-stack 2nd
// vector from the top down. lastOffset tracks the end of the previously
// visited allocation so each gap between allocations is seen as unused space.
// NOTE(review): the lines that actually accumulate into outInfo are elided in
// this chunk; only the sweep skeleton is visible.
8040 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8042 const VkDeviceSize size = GetSize();
8043 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8044 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8045 const size_t suballoc1stCount = suballocations1st.size();
8046 const size_t suballoc2ndCount = suballocations2nd.size();
8057 VkDeviceSize lastOffset = 0;
// Pass 1 (ring buffer only): 2nd-vector allocations occupy the region below
// the 1st vector's first live item.
8059 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8061 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8062 size_t nextAlloc2ndIndex = 0;
8063 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries to the next real allocation.
8066 while(nextAlloc2ndIndex < suballoc2ndCount &&
8067 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8069 ++nextAlloc2ndIndex;
8073 if(nextAlloc2ndIndex < suballoc2ndCount)
8075 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8078 if(lastOffset < suballoc.offset)
8081 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8095 lastOffset = suballoc.offset + suballoc.size;
8096 ++nextAlloc2ndIndex;
// Trailing gap up to the 1st vector's start.
8102 if(lastOffset < freeSpace2ndTo1stEnd)
8104 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8112 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to the double-stack boundary or block end.
8117 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8118 const VkDeviceSize freeSpace1stTo2ndEnd =
8119 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8120 while(lastOffset < freeSpace1stTo2ndEnd)
8123 while(nextAlloc1stIndex < suballoc1stCount &&
8124 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8126 ++nextAlloc1stIndex;
8130 if(nextAlloc1stIndex < suballoc1stCount)
8132 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8135 if(lastOffset < suballoc.offset)
8138 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8152 lastOffset = suballoc.offset + suballoc.size;
8153 ++nextAlloc1stIndex;
8159 if(lastOffset < freeSpace1stTo2ndEnd)
8161 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8169 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double stack only): 2nd vector stored top-down, so iterate it in
// reverse index order to continue the sweep toward the block end.
8173 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8175 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8176 while(lastOffset < size)
8179 while(nextAlloc2ndIndex != SIZE_MAX &&
8180 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8182 --nextAlloc2ndIndex;
8186 if(nextAlloc2ndIndex != SIZE_MAX)
8188 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8191 if(lastOffset < suballoc.offset)
8194 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8208 lastOffset = suballoc.offset + suballoc.size;
8209 --nextAlloc2ndIndex;
8215 if(lastOffset < size)
8217 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats using the same
// three-pass address-order sweep as CalcAllocationStatInfo: ring-buffer 2nd
// vector, then 1st vector, then double-stack 2nd vector (top down).
// NOTE(review): the lines that add counts/sizes into inoutStats inside the
// loops are elided in this chunk; only the sweep skeleton is visible.
8233 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8235 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8236 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8237 const VkDeviceSize size = GetSize();
8238 const size_t suballoc1stCount = suballocations1st.size();
8239 const size_t suballoc2ndCount = suballocations2nd.size();
// The whole block contributes to the pool's total size.
8241 inoutStats.
size += size;
8243 VkDeviceSize lastOffset = 0;
// Pass 1 (ring buffer): region below the 1st vector's first live item.
8245 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8247 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8248 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8249 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) entries to the next real allocation.
8252 while(nextAlloc2ndIndex < suballoc2ndCount &&
8253 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8255 ++nextAlloc2ndIndex;
8259 if(nextAlloc2ndIndex < suballoc2ndCount)
8261 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8264 if(lastOffset < suballoc.offset)
8267 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8278 lastOffset = suballoc.offset + suballoc.size;
8279 ++nextAlloc2ndIndex;
8284 if(lastOffset < freeSpace2ndTo1stEnd)
8287 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8294 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to the double-stack boundary or block end.
8299 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8300 const VkDeviceSize freeSpace1stTo2ndEnd =
8301 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8302 while(lastOffset < freeSpace1stTo2ndEnd)
8305 while(nextAlloc1stIndex < suballoc1stCount &&
8306 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8308 ++nextAlloc1stIndex;
8312 if(nextAlloc1stIndex < suballoc1stCount)
8314 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8317 if(lastOffset < suballoc.offset)
8320 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8331 lastOffset = suballoc.offset + suballoc.size;
8332 ++nextAlloc1stIndex;
8337 if(lastOffset < freeSpace1stTo2ndEnd)
8340 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8347 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double stack): 2nd vector iterated in reverse (top-down storage).
8351 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8353 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8354 while(lastOffset < size)
8357 while(nextAlloc2ndIndex != SIZE_MAX &&
8358 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8360 --nextAlloc2ndIndex;
8364 if(nextAlloc2ndIndex != SIZE_MAX)
8366 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8369 if(lastOffset < suballoc.offset)
8372 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8383 lastOffset = suballoc.offset + suballoc.size;
8384 --nextAlloc2ndIndex;
8389 if(lastOffset < size)
8392 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this block. Two sweeps over the same
// address-order traversal: the first only counts allocations / unused ranges
// and used bytes (needed up front by PrintDetailedMap_Begin), the second
// actually writes each allocation and unused range to the JSON writer.
#if VMA_STATS_STRING_ENABLED 8406 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8408 const VkDeviceSize size = GetSize();
8409 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8410 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8411 const size_t suballoc1stCount = suballocations1st.size();
8412 const size_t suballoc2ndCount = suballocations2nd.size();
// --- First sweep: counting only. ---
8416 size_t unusedRangeCount = 0;
8417 VkDeviceSize usedBytes = 0;
8419 VkDeviceSize lastOffset = 0;
8421 size_t alloc2ndCount = 0;
// Ring-buffer 2nd vector occupies the region below the 1st vector's start.
8422 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8424 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8425 size_t nextAlloc2ndIndex = 0;
8426 while(lastOffset < freeSpace2ndTo1stEnd)
8429 while(nextAlloc2ndIndex < suballoc2ndCount &&
8430 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8432 ++nextAlloc2ndIndex;
8436 if(nextAlloc2ndIndex < suballoc2ndCount)
8438 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8441 if(lastOffset < suballoc.offset)
8450 usedBytes += suballoc.size;
8453 lastOffset = suballoc.offset + suballoc.size;
8454 ++nextAlloc2ndIndex;
8459 if(lastOffset < freeSpace2ndTo1stEnd)
8466 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector, up to the double-stack boundary or block end.
8471 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8472 size_t alloc1stCount = 0;
8473 const VkDeviceSize freeSpace1stTo2ndEnd =
8474 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8475 while(lastOffset < freeSpace1stTo2ndEnd)
8478 while(nextAlloc1stIndex < suballoc1stCount &&
8479 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8481 ++nextAlloc1stIndex;
8485 if(nextAlloc1stIndex < suballoc1stCount)
8487 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8490 if(lastOffset < suballoc.offset)
8499 usedBytes += suballoc.size;
8502 lastOffset = suballoc.offset + suballoc.size;
8503 ++nextAlloc1stIndex;
8508 if(lastOffset < size)
8515 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack 2nd vector, iterated in reverse (stored top-down).
8519 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8521 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8522 while(lastOffset < size)
8525 while(nextAlloc2ndIndex != SIZE_MAX &&
8526 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8528 --nextAlloc2ndIndex;
8532 if(nextAlloc2ndIndex != SIZE_MAX)
8534 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8537 if(lastOffset < suballoc.offset)
8546 usedBytes += suballoc.size;
8549 lastOffset = suballoc.offset + suballoc.size;
8550 --nextAlloc2ndIndex;
8555 if(lastOffset < size)
// --- Second sweep: emit JSON, now that the header totals are known. ---
8567 const VkDeviceSize unusedBytes = size - usedBytes;
8568 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8573 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8575 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8576 size_t nextAlloc2ndIndex = 0;
8577 while(lastOffset < freeSpace2ndTo1stEnd)
8580 while(nextAlloc2ndIndex < suballoc2ndCount &&
8581 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8583 ++nextAlloc2ndIndex;
8587 if(nextAlloc2ndIndex < suballoc2ndCount)
8589 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation is written as an unused range.
8592 if(lastOffset < suballoc.offset)
8595 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8596 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8601 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8604 lastOffset = suballoc.offset + suballoc.size;
8605 ++nextAlloc2ndIndex;
8610 if(lastOffset < freeSpace2ndTo1stEnd)
8613 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8614 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8618 lastOffset = freeSpace2ndTo1stEnd;
8623 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8624 while(lastOffset < freeSpace1stTo2ndEnd)
8627 while(nextAlloc1stIndex < suballoc1stCount &&
8628 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8630 ++nextAlloc1stIndex;
8634 if(nextAlloc1stIndex < suballoc1stCount)
8636 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8639 if(lastOffset < suballoc.offset)
8642 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8643 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8648 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8651 lastOffset = suballoc.offset + suballoc.size;
8652 ++nextAlloc1stIndex;
8657 if(lastOffset < freeSpace1stTo2ndEnd)
8660 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8661 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8665 lastOffset = freeSpace1stTo2ndEnd;
8669 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8671 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8672 while(lastOffset < size)
8675 while(nextAlloc2ndIndex != SIZE_MAX &&
8676 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8678 --nextAlloc2ndIndex;
8682 if(nextAlloc2ndIndex != SIZE_MAX)
8684 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8687 if(lastOffset < suballoc.offset)
8690 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8691 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8696 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8699 lastOffset = suballoc.offset + suballoc.size;
8700 --nextAlloc2ndIndex;
8705 if(lastOffset < size)
8708 const VkDeviceSize unusedRangeSize = size - lastOffset;
8709 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8718 PrintDetailedMap_End(json);
// Tries to find a place for a new allocation of allocSize/allocAlignment and
// fills *pAllocationRequest on success. Three strategies, chosen by the
// second-vector mode and (per the elided "upper address" branch guard) the
// caller's request: allocate downward at the top (double stack), upward at
// the end of the 1st vector, or upward in ring-buffer wrap-around space —
// optionally making stale allocations "lost" (canMakeOtherLost).
// NOTE(review): the true/false return statements and several branch guards
// are elided in this chunk.
#endif // #if VMA_STATS_STRING_ENABLED 8722 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8723 uint32_t currentFrameIndex,
8724 uint32_t frameInUseCount,
8725 VkDeviceSize bufferImageGranularity,
8726 VkDeviceSize allocSize,
8727 VkDeviceSize allocAlignment,
8729 VmaSuballocationType allocType,
8730 bool canMakeOtherLost,
8732 VmaAllocationRequest* pAllocationRequest)
8734 VMA_ASSERT(allocSize > 0);
8735 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8736 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8737 VMA_HEAVY_ASSERT(Validate());
8739 const VkDeviceSize size = GetSize();
8740 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8741 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy A: allocate at the top of the block, growing downward
// (double stack). Incompatible with ring-buffer use of the 2nd vector.
8745 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8747 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8752 if(allocSize > size)
// Candidate offset: just below the block end, or below the current top of
// the 2nd (top-down) stack.
8756 VkDeviceSize resultBaseOffset = size - allocSize;
8757 if(!suballocations2nd.empty())
8759 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8760 resultBaseOffset = lastSuballoc.offset - allocSize;
8761 if(allocSize > lastSuballoc.offset)
8768 VkDeviceSize resultOffset = resultBaseOffset;
// Leave the debug margin below the neighbor (downward allocation subtracts).
8771 if(VMA_DEBUG_MARGIN > 0)
8773 if(resultOffset < VMA_DEBUG_MARGIN)
8777 resultOffset -= VMA_DEBUG_MARGIN;
8781 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect Vulkan bufferImageGranularity vs. the 2nd-stack neighbor above:
// if a conflicting type shares the page, push down to a granularity boundary.
8785 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8787 bool bufferImageGranularityConflict =
false;
8788 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8790 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8791 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8793 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8795 bufferImageGranularityConflict =
true;
8803 if(bufferImageGranularityConflict)
8805 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Fits only if it doesn't collide with the end of the bottom-up 1st vector.
8810 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8811 suballocations1st.back().offset + suballocations1st.back().size :
8813 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity against the 1st-vector neighbor below.
8817 if(bufferImageGranularity > 1)
8819 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8821 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8822 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8824 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: record the request. Nothing needs to be made lost here.
8838 pAllocationRequest->offset = resultOffset;
8839 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8840 pAllocationRequest->sumItemSize = 0;
8842 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy B: append after the 1st (bottom-up) vector. ---
8848 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8852 VkDeviceSize resultBaseOffset = 0;
8853 if(!suballocations1st.empty())
8855 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8856 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8860 VkDeviceSize resultOffset = resultBaseOffset;
8863 if(VMA_DEBUG_MARGIN > 0)
8865 resultOffset += VMA_DEBUG_MARGIN;
8869 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity check against the preceding 1st-vector allocations.
8873 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8875 bool bufferImageGranularityConflict =
false;
8876 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8878 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8879 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8881 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8883 bufferImageGranularityConflict =
true;
8891 if(bufferImageGranularityConflict)
8893 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack, or the block end.
8897 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8898 suballocations2nd.back().offset : size;
8901 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity check against the 2nd-stack allocations above the candidate.
8905 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8907 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8909 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8910 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8912 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8926 pAllocationRequest->offset = resultOffset;
8927 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8928 pAllocationRequest->sumItemSize = 0;
8930 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy C: wrap around into the space freed at the bottom of the
// block (ring buffer), possibly making stale 1st-vector allocations lost.
8937 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8939 VMA_ASSERT(!suballocations1st.empty());
8941 VkDeviceSize resultBaseOffset = 0;
8942 if(!suballocations2nd.empty())
8944 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8945 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8949 VkDeviceSize resultOffset = resultBaseOffset;
8952 if(VMA_DEBUG_MARGIN > 0)
8954 resultOffset += VMA_DEBUG_MARGIN;
8958 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity check against preceding 2nd-vector (ring) allocations.
8962 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8964 bool bufferImageGranularityConflict =
false;
8965 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8967 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8968 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8970 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8972 bufferImageGranularityConflict =
true;
8980 if(bufferImageGranularityConflict)
8982 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8986 pAllocationRequest->itemsToMakeLostCount = 0;
8987 pAllocationRequest->sumItemSize = 0;
8988 size_t index1st = m_1stNullItemsBeginCount;
// Count how many overlapping 1st-vector allocations could be made lost
// (stale beyond frameInUseCount frames) to make room for this request.
8990 if(canMakeOtherLost)
8992 while(index1st < suballocations1st.size() &&
8993 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8996 const VmaSuballocation& suballoc = suballocations1st[index1st];
8997 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9003 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9004 if(suballoc.hAllocation->CanBecomeLost() &&
9005 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9007 ++pAllocationRequest->itemsToMakeLostCount;
9008 pAllocationRequest->sumItemSize += suballoc.size;
// Extra pass for granularity: following items sharing the page may also
// need to be made lost.
9020 if(bufferImageGranularity > 1)
9022 while(index1st < suballocations1st.size())
9024 const VmaSuballocation& suballoc = suballocations1st[index1st];
9025 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9027 if(suballoc.hAllocation != VK_NULL_HANDLE)
9030 if(suballoc.hAllocation->CanBecomeLost() &&
9031 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9033 ++pAllocationRequest->itemsToMakeLostCount;
9034 pAllocationRequest->sumItemSize += suballoc.size;
// The request fits if it ends before the block end (all 1st items consumed)
// or before the next surviving 1st-vector item.
9053 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9054 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9058 if(bufferImageGranularity > 1)
9060 for(
size_t nextSuballocIndex = index1st;
9061 nextSuballocIndex < suballocations1st.size();
9062 nextSuballocIndex++)
9064 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9065 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9067 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9081 pAllocationRequest->offset = resultOffset;
9082 pAllocationRequest->sumFreeSize =
9083 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9085 - pAllocationRequest->sumItemSize;
// Executes the "make lost" plan produced by CreateAllocationRequest: walks
// the 1st vector from its first live item and turns stale allocations into
// free null items until itemsToMakeLostCount have been lost.
9095 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9096 uint32_t currentFrameIndex,
9097 uint32_t frameInUseCount,
9098 VmaAllocationRequest* pAllocationRequest)
// Nothing to lose — trivially done.
9100 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing allocations is only meaningful for the upward/ring strategies.
9105 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9107 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9108 size_t index1st = m_1stNullItemsBeginCount;
9109 size_t madeLostCount = 0;
9110 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9112 VMA_ASSERT(index1st < suballocations1st.size());
9113 VmaSuballocation& suballoc = suballocations1st[index1st];
9114 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9116 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9117 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
// MakeLost can fail (e.g. the allocation became in-use again); only on
// success is the slot converted to a free null item.
9118 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9120 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9121 suballoc.hAllocation = VK_NULL_HANDLE;
9122 m_SumFreeSize += suballoc.size;
9123 ++m_1stNullItemsMiddleCount;
// Makes every allocation in this block that is stale (last used more than
// frameInUseCount frames ago and flagged as can-become-lost) lost, in both
// vectors; returns how many were lost.
9140 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9142 uint32_t lostAllocationCount = 0;
// Sweep the live region of the 1st vector.
9144 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9145 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9147 VmaSuballocation& suballoc = suballocations1st[i];
9148 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9149 suballoc.hAllocation->CanBecomeLost() &&
9150 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9152 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9153 suballoc.hAllocation = VK_NULL_HANDLE;
9154 ++m_1stNullItemsMiddleCount;
9155 m_SumFreeSize += suballoc.size;
9156 ++lostAllocationCount;
// Sweep the whole 2nd vector.
9160 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9161 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9163 VmaSuballocation& suballoc = suballocations2nd[i];
9164 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9165 suballoc.hAllocation->CanBecomeLost() &&
9166 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9168 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9169 suballoc.hAllocation = VK_NULL_HANDLE;
9170 ++m_2ndNullItemsCount;
9171 ++lostAllocationCount;
// (Elided here: cleanup/compaction when anything was lost.)
9175 if(lostAllocationCount)
9180 return lostAllocationCount;
// Validates the magic-value guard bytes written around every live allocation
// (at offset - VMA_DEBUG_MARGIN and at offset + size) in both vectors.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard.
9183 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
9185 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9186 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9188 const VmaSuballocation& suballoc = suballocations1st[i];
9189 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region immediately before the allocation.
9191 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9193 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9194 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region immediately after the allocation.
9196 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9198 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9199 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for every live allocation in the 2nd vector.
9204 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9205 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9207 const VmaSuballocation& suballoc = suballocations2nd[i];
9208 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9210 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9212 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9213 return VK_ERROR_VALIDATION_FAILED_EXT;
9215 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9217 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9218 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: appends the new
// suballocation to the appropriate vector and switches the second-vector
// mode when the placement requires it.
// NOTE(review): the branch selector on the request (upper-address vs normal)
// is elided in this chunk; the branches below are inferred from the asserts.
9226 void VmaBlockMetadata_Linear::Alloc(
9227 const VmaAllocationRequest& request,
9228 VmaSuballocationType type,
9229 VkDeviceSize allocSize,
9233 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address placement: push onto the top-down 2nd stack and switch to
// double-stack mode. Incompatible with prior ring-buffer use.
9237 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9238 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9239 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9240 suballocations2nd.push_back(newSuballoc);
9241 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9245 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation in an empty block goes into the 1st vector.
9248 if(suballocations1st.empty())
9250 suballocations1st.push_back(newSuballoc);
// Offset beyond the 1st vector's end: plain append (bottom-up growth).
9255 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9258 VMA_ASSERT(request.offset + allocSize <= GetSize());
9259 suballocations1st.push_back(newSuballoc);
// Offset below the 1st vector's first live item: wrap-around — the 2nd
// vector becomes (or continues as) a ring buffer.
9262 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9264 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9266 switch(m_2ndVectorMode)
9268 case SECOND_VECTOR_EMPTY:
9270 VMA_ASSERT(suballocations2nd.empty());
9271 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9273 case SECOND_VECTOR_RING_BUFFER:
9275 VMA_ASSERT(!suballocations2nd.empty());
9277 case SECOND_VECTOR_DOUBLE_STACK:
9278 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9284 suballocations2nd.push_back(newSuballoc);
// Any other offset means the request didn't come from this metadata.
9288 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
// The committed bytes are no longer free.
9293 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its block offset.
9296 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9298 FreeAtOffset(allocation->GetOffset());
// VmaBlockMetadata_Linear::FreeAtOffset — releases the suballocation that
// starts at `offset`, trying cheap fast paths (first/last item) before a
// binary search of either vector.
// NOTE(review): mangled extraction — stray leading numbers and elided lines
// (early returns, CleanupAfterFree calls, braces) are artifacts.
9301 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9303 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9304 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path 1: the first live item of the 1st vector — mark it free and
// grow the leading null-item run instead of erasing from the middle.
9306 if(!suballocations1st.empty())
9309 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9310 if(firstSuballoc.offset == offset)
9312 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9313 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9314 m_SumFreeSize += firstSuballoc.size;
9315 ++m_1stNullItemsBeginCount;
// Fast path 2: the last item of the 2nd vector (ring buffer or stack top).
9322 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9323 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9325 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9326 if(lastSuballoc.offset == offset)
9328 m_SumFreeSize += lastSuballoc.size;
9329 suballocations2nd.pop_back();
// Fast path 3: the last item of the 1st vector when no 2nd vector is in use.
9335 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9337 VmaSuballocation& lastSuballoc = suballocations1st.back();
9338 if(lastSuballoc.offset == offset)
9340 m_SumFreeSize += lastSuballoc.size;
9341 suballocations1st.pop_back();
// Slow path: binary search in the 1st vector (sorted by offset ascending).
9349 VmaSuballocation refSuballoc;
9350 refSuballoc.offset = offset;
9352 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9353 suballocations1st.begin() + m_1stNullItemsBeginCount,
9354 suballocations1st.end(),
9356 if(it != suballocations1st.end())
9358 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9359 it->hAllocation = VK_NULL_HANDLE;
9360 ++m_1stNullItemsMiddleCount;
9361 m_SumFreeSize += it->size;
// Slow path: binary search in the 2nd vector; sort order depends on whether
// it is used as a ring buffer (ascending) or a double stack (descending).
9367 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9370 VmaSuballocation refSuballoc;
9371 refSuballoc.offset = offset;
9373 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9374 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9375 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9376 if(it != suballocations2nd.end())
9378 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9379 it->hAllocation = VK_NULL_HANDLE;
9380 ++m_2ndNullItemsCount;
9381 m_SumFreeSize += it->size;
// Nothing matched `offset` anywhere — caller passed a bogus offset.
9387 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9390 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9392 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9393 const size_t suballocCount = AccessSuballocations1st().size();
9394 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// VmaBlockMetadata_Linear::CleanupAfterFree — housekeeping run after a free:
// trims null items from vector edges, optionally compacts the 1st vector,
// and promotes the 2nd vector to 1st when the 1st becomes empty.
// NOTE(review): mangled extraction — stray leading numbers and elided lines
// (the IsEmpty() fast-path condition, ++srcIndex, braces) are artifacts.
9397 void VmaBlockMetadata_Linear::CleanupAfterFree()
9399 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9400 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path (elided condition: block is completely empty): reset everything.
9404 suballocations1st.clear();
9405 suballocations2nd.clear();
9406 m_1stNullItemsBeginCount = 0;
9407 m_1stNullItemsMiddleCount = 0;
9408 m_2ndNullItemsCount = 0;
9409 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9413 const size_t suballoc1stCount = suballocations1st.size();
9414 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9415 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run: middle nulls adjacent to the front become
// "begin" nulls.
9418 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9419 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9421 ++m_1stNullItemsBeginCount;
9422 --m_1stNullItemsMiddleCount;
// Pop trailing null items off the back of the 1st vector.
9426 while(m_1stNullItemsMiddleCount > 0 &&
9427 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9429 --m_1stNullItemsMiddleCount;
9430 suballocations1st.pop_back();
// Pop trailing null items off the back of the 2nd vector.
9434 while(m_2ndNullItemsCount > 0 &&
9435 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9437 --m_2ndNullItemsCount;
9438 suballocations2nd.pop_back();
// Compaction: slide live items down over the null gaps, then shrink.
9441 if(ShouldCompact1st())
9443 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9444 size_t srcIndex = m_1stNullItemsBeginCount;
9445 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9447 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9451 if(dstIndex != srcIndex)
9453 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9457 suballocations1st.resize(nonNullItemCount);
9458 m_1stNullItemsBeginCount = 0;
9459 m_1stNullItemsMiddleCount = 0;
// When the 2nd vector is drained, drop back to single-vector mode.
9463 if(suballocations2nd.empty())
9465 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// When the 1st vector holds no live items, swap roles: the 2nd (ring-buffer)
// vector becomes the new 1st via m_1stVectorIndex toggle.
9469 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9471 suballocations1st.clear();
9472 m_1stNullItemsBeginCount = 0;
9474 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9477 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9478 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9479 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9480 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9482 ++m_1stNullItemsBeginCount;
9483 --m_1stNullItemsMiddleCount;
9485 m_2ndNullItemsCount = 0;
9486 m_1stVectorIndex ^= 1;
9491 VMA_HEAVY_ASSERT(Validate());
// VmaBlockMetadata_Buddy — buddy-system allocator metadata. The constructor
// zero-initializes counters and the per-level free lists; real setup happens
// in Init(). NOTE(review): mangled extraction — some initializer-list entries
// (e.g. the root pointer) and the destructor body are elided here.
9498 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9499 VmaBlockMetadata(hAllocator),
9501 m_AllocationCount(0),
// Clear all MAX_LEVELS free-list heads in one shot.
9505 memset(m_FreeList, 0,
sizeof(m_FreeList));
9508 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
// VmaBlockMetadata_Buddy::Init — sizes the buddy tree. Usable size is the
// largest power of two <= block size; level count is derived from MAX_LEVELS
// and MIN_NODE_SIZE; a single free root node covers the whole usable range.
// NOTE(review): mangled extraction — the level-count loop increment and the
// m_Root assignment lines are elided artifacts.
9513 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9515 VmaBlockMetadata::Init(size);
9517 m_UsableSize = VmaPrevPow2(size);
9518 m_SumFreeSize = m_UsableSize;
9522 while(m_LevelCount < MAX_LEVELS &&
9523 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
9528 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9529 rootNode->offset = 0;
9530 rootNode->type = Node::TYPE_FREE;
9531 rootNode->parent = VMA_NULL;
9532 rootNode->buddy = VMA_NULL;
9535 AddToFreeListFront(0, rootNode);
// VmaBlockMetadata_Buddy::Validate — consistency check: recursively validates
// the node tree, cross-checks accumulated counters, then verifies every
// per-level free list is a well-formed doubly-linked list of FREE nodes.
// NOTE(review): mangled extraction — stray leading numbers and some elided
// lines (loop conditions, braces) are artifacts.
9538 bool VmaBlockMetadata_Buddy::Validate()
const 9541 ValidationContext ctx;
9542 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9544 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Counters gathered during the tree walk must match the cached totals.
9546 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9547 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
9550 for(uint32_t level = 0; level < m_LevelCount; ++level)
9552 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9553 m_FreeList[level].front->free.prev == VMA_NULL);
9555 for(Node* node = m_FreeList[level].front;
9557 node = node->free.next)
9559 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9561 if(node->free.next == VMA_NULL)
9563 VMA_VALIDATE(m_FreeList[level].back == node);
9567 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must have empty free lists.
9573 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9575 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax — the largest free node is
// found at the shallowest level with a non-empty free list (node size halves
// per level). NOTE(review): mangled extraction — the trailing "no free node"
// return line is elided here.
9581 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9583 for(uint32_t level = 0; level < m_LevelCount; ++level)
9585 if(m_FreeList[level].front != VMA_NULL)
9587 return LevelToNodeSize(level);
// Statistics for the buddy allocator. Both functions account separately for
// the "unusable" tail (block size minus the power-of-two usable size).
// NOTE(review): mangled extraction — the stat-accumulation lines inside the
// unusableSize branches are elided artifacts.
9593 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9595 const VkDeviceSize unusableSize = GetUnusableSize();
9606 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9608 if(unusableSize > 0)
// AddPoolStats — adds this block's totals into an aggregate VmaPoolStats;
// the unusable tail counts toward unusedSize.
9617 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9619 const VkDeviceSize unusableSize = GetUnusableSize();
9621 inoutStats.
size += GetSize();
9622 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9627 if(unusableSize > 0)
// PrintDetailedMap — JSON dump of this buddy block: summary stats, then a
// recursive node walk, then the unusable tail reported as an unused range.
// NOTE(review): mangled extraction — arguments of PrintDetailedMap_Begin /
// _UnusedRange are partially elided.
9634 #if VMA_STATS_STRING_ENABLED 9636 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9640 CalcAllocationStatInfo(stat);
9642 PrintDetailedMap_Begin(
9648 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9650 const VkDeviceSize unusableSize = GetUnusableSize();
9651 if(unusableSize > 0)
9653 PrintDetailedMap_UnusedRange(json,
9658 PrintDetailedMap_End(json);
// CreateAllocationRequest — finds a free buddy node able to hold the request.
// Scans levels from the target (smallest fitting) level upward; the chosen
// level is stashed in customData for Alloc() to split down later.
// NOTE(review): mangled extraction — the upperAddress parameter line, the
// "return true/false" lines and braces are elided artifacts.
9661 #endif // #if VMA_STATS_STRING_ENABLED 9663 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9664 uint32_t currentFrameIndex,
9665 uint32_t frameInUseCount,
9666 VkDeviceSize bufferImageGranularity,
9667 VkDeviceSize allocSize,
9668 VkDeviceSize allocAlignment,
9670 VmaSuballocationType allocType,
9671 bool canMakeOtherLost,
9673 VmaAllocationRequest* pAllocationRequest)
9675 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively inflate size/alignment to bufferImageGranularity for
// allocation types that may conflict with neighboring images.
9679 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9680 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9681 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9683 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9684 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9687 if(allocSize > m_UsableSize)
9692 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9693 for(uint32_t level = targetLevel + 1; level--; )
9695 for(Node* freeNode = m_FreeList[level].front;
9696 freeNode != VMA_NULL;
9697 freeNode = freeNode->free.next)
9699 if(freeNode->offset % allocAlignment == 0)
9701 pAllocationRequest->offset = freeNode->offset;
9702 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9703 pAllocationRequest->sumItemSize = 0;
9704 pAllocationRequest->itemsToMakeLostCount = 0;
// The source level is carried to Alloc() through customData.
9705 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// Lost-allocation support is not implemented for the buddy algorithm:
// a request succeeds only if it needs zero allocations made lost, and
// MakeAllocationsLost (body elided) has nothing to reclaim.
9714 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9715 uint32_t currentFrameIndex,
9716 uint32_t frameInUseCount,
9717 VmaAllocationRequest* pAllocationRequest)
9723 return pAllocationRequest->itemsToMakeLostCount == 0;
9726 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// VmaBlockMetadata_Buddy::Alloc — commits an allocation request: locates the
// free node chosen by CreateAllocationRequest (level from customData, match
// by offset), then repeatedly splits it until the node size matches the
// target level, and finally marks it as an allocation.
// NOTE(review): mangled extraction — the hAllocation parameter line, loop
// increments and some brace lines are elided artifacts.
9735 void VmaBlockMetadata_Buddy::Alloc(
9736 const VmaAllocationRequest& request,
9737 VmaSuballocationType type,
9738 VkDeviceSize allocSize,
9742 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9743 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Walk the free list of the recorded level to find the node at the
// requested offset.
9745 Node* currNode = m_FreeList[currLevel].front;
9746 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9747 while(currNode->offset != request.offset)
9749 currNode = currNode->free.next;
9750 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node level by level until it is exactly the target size.
9754 while(currLevel < targetLevel)
9758 RemoveFromFreeList(currLevel, currNode);
9760 const uint32_t childrenLevel = currLevel + 1;
9763 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9764 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9766 leftChild->offset = currNode->offset;
9767 leftChild->type = Node::TYPE_FREE;
9768 leftChild->parent = currNode;
9769 leftChild->buddy = rightChild;
9771 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9772 rightChild->type = Node::TYPE_FREE;
9773 rightChild->parent = currNode;
9774 rightChild->buddy = leftChild;
// The parent becomes a SPLIT node; only leftChild is stored, the right
// child is always reachable as leftChild->buddy.
9777 currNode->type = Node::TYPE_SPLIT;
9778 currNode->split.leftChild = leftChild;
// Push right before left so the left (lower-offset) child ends up at the
// front of the free list.
9781 AddToFreeListFront(childrenLevel, rightChild);
9782 AddToFreeListFront(childrenLevel, leftChild);
9787 currNode = m_FreeList[currLevel].front;
9796 VMA_ASSERT(currLevel == targetLevel &&
9797 currNode != VMA_NULL &&
9798 currNode->type == Node::TYPE_FREE);
9799 RemoveFromFreeList(currLevel, currNode);
9802 currNode->type = Node::TYPE_ALLOCATION;
9803 currNode->allocation.alloc = hAllocation;
9805 ++m_AllocationCount;
9807 m_SumFreeSize -= allocSize;
9810 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9812 if(node->type == Node::TYPE_SPLIT)
9814 DeleteNode(node->split.leftChild->buddy);
9815 DeleteNode(node->split.leftChild);
9818 vma_delete(GetAllocationCallbacks(), node);
// ValidateNode — recursive invariant check for one buddy-tree node: parent
// and buddy links must be consistent, and per-type bookkeeping (free size,
// allocation count) is accumulated into the ValidationContext.
// NOTE(review): mangled extraction — the switch(curr->type) line, break
// statements and the final return are elided artifacts.
9821 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9823 VMA_VALIDATE(level < m_LevelCount);
9824 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == NULL) has no buddy; buddy links are symmetric.
9825 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9826 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9829 case Node::TYPE_FREE:
9831 ctx.calculatedSumFreeSize += levelNodeSize;
9832 ++ctx.calculatedFreeCount;
9834 case Node::TYPE_ALLOCATION:
9835 ++ctx.calculatedAllocationCount;
// Internal fragmentation: node size minus actual allocation size is free.
9836 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9837 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9839 case Node::TYPE_SPLIT:
9841 const uint32_t childrenLevel = level + 1;
9842 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9843 const Node*
const leftChild = curr->split.leftChild;
9844 VMA_VALIDATE(leftChild != VMA_NULL);
9845 VMA_VALIDATE(leftChild->offset == curr->offset);
9846 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9848 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9850 const Node*
const rightChild = leftChild->buddy;
9851 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9852 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9854 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// AllocSizeToLevel — deepest (smallest-node) level whose node size still
// fits allocSize; descends while the next-smaller level would still fit.
// NOTE(review): mangled extraction — the `level` declaration, its increment
// and the return statement are elided artifacts.
9865 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9869 VkDeviceSize currLevelNodeSize = m_UsableSize;
9870 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9871 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9874 currLevelNodeSize = nextLevelNodeSize;
9875 nextLevelNodeSize = currLevelNodeSize >> 1;
// VmaBlockMetadata_Buddy::FreeAtOffset — frees the allocation at `offset`:
// descends the tree by offset to the ALLOCATION leaf, marks it FREE, then
// merges free buddy pairs upward as far as possible.
// NOTE(review): mangled extraction — the `level` tracking variable, loop
// decrements and brace lines are elided artifacts.
9880 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9883 Node* node = m_Root;
9884 VkDeviceSize nodeOffset = 0;
9886 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: at each SPLIT, go left if the offset is in the lower half,
// otherwise right (leftChild->buddy), advancing nodeOffset accordingly.
9887 while(node->type == Node::TYPE_SPLIT)
9889 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9890 if(offset < nodeOffset + nextLevelSize)
9892 node = node->split.leftChild;
9896 node = node->split.leftChild->buddy;
9897 nodeOffset += nextLevelSize;
9900 levelNodeSize = nextLevelSize;
9903 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9904 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9907 --m_AllocationCount;
9908 m_SumFreeSize += alloc->GetSize();
9910 node->type = Node::TYPE_FREE;
// Coalesce: while this node's buddy is also free, delete both children and
// turn the parent back into a single FREE node one level up.
9913 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9915 RemoveFromFreeList(level, node->buddy);
9916 Node*
const parent = node->parent;
9918 vma_delete(GetAllocationCallbacks(), node->buddy);
9919 vma_delete(GetAllocationCallbacks(), node);
9920 parent->type = Node::TYPE_FREE;
9928 AddToFreeListFront(level, node);
// CalcAllocationStatInfoNode — recursive stat accumulation per node: FREE
// nodes count as unused ranges, ALLOCATION nodes as used (plus any internal
// fragmentation as unused), SPLIT nodes recurse into both children.
// NOTE(review): mangled extraction — the switch(node->type) line and the
// individual stat-field updates are elided artifacts.
9931 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9935 case Node::TYPE_FREE:
9941 case Node::TYPE_ALLOCATION:
9943 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation of this node is reported as an unused range.
9949 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9950 if(unusedRangeSize > 0)
9959 case Node::TYPE_SPLIT:
9961 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9962 const Node*
const leftChild = node->split.leftChild;
9963 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9964 const Node*
const rightChild = leftChild->buddy;
9965 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
9973 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9975 VMA_ASSERT(node->type == Node::TYPE_FREE);
9978 Node*
const frontNode = m_FreeList[level].front;
9979 if(frontNode == VMA_NULL)
9981 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9982 node->free.prev = node->free.next = VMA_NULL;
9983 m_FreeList[level].front = m_FreeList[level].back = node;
9987 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9988 node->free.prev = VMA_NULL;
9989 node->free.next = frontNode;
9990 frontNode->free.prev = node;
9991 m_FreeList[level].front = node;
9995 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9997 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10000 if(node->free.prev == VMA_NULL)
10002 VMA_ASSERT(m_FreeList[level].front == node);
10003 m_FreeList[level].front = node->free.next;
10007 Node*
const prevFreeNode = node->free.prev;
10008 VMA_ASSERT(prevFreeNode->free.next == node);
10009 prevFreeNode->free.next = node->free.next;
10013 if(node->free.next == VMA_NULL)
10015 VMA_ASSERT(m_FreeList[level].back == node);
10016 m_FreeList[level].back = node->free.prev;
10020 Node*
const nextFreeNode = node->free.next;
10021 VMA_ASSERT(nextFreeNode->free.prev == node);
10022 nextFreeNode->free.prev = node->free.prev;
// PrintDetailedMapNode — recursive JSON dump of one node: FREE nodes as
// unused ranges, ALLOCATION nodes as allocations (plus any internal
// fragmentation), SPLIT nodes recurse into both children.
// NOTE(review): mangled extraction — the switch(node->type) line, break
// statements and braces are elided artifacts.
10026 #if VMA_STATS_STRING_ENABLED 10027 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10031 case Node::TYPE_FREE:
10032 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10034 case Node::TYPE_ALLOCATION:
10036 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10037 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10038 if(allocSize < levelNodeSize)
10040 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10044 case Node::TYPE_SPLIT:
10046 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10047 const Node*
const leftChild = node->split.leftChild;
10048 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10049 const Node*
const rightChild = leftChild->buddy;
10050 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// VmaDeviceMemoryBlock constructor — members start in an "uninitialized"
// state (null metadata, invalid type index, null VkDeviceMemory, unmapped);
// real setup happens in Init(). NOTE(review): mangled extraction — some
// initializer-list entries (e.g. the map counter) are elided artifacts.
10057 #endif // #if VMA_STATS_STRING_ENABLED 10063 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10064 m_pMetadata(VMA_NULL),
10065 m_MemoryTypeIndex(UINT32_MAX),
10067 m_hMemory(VK_NULL_HANDLE),
10069 m_pMappedData(VMA_NULL)
// VmaDeviceMemoryBlock::Init — adopts an already-allocated VkDeviceMemory
// and creates the metadata object matching the requested algorithm (linear,
// buddy, or the default generic algorithm).
// NOTE(review): mangled extraction — the switch/case lines selecting the
// algorithm and some brace lines are elided artifacts.
10073 void VmaDeviceMemoryBlock::Init(
10075 uint32_t newMemoryTypeIndex,
10076 VkDeviceMemory newMemory,
10077 VkDeviceSize newSize,
10079 uint32_t algorithm)
// Init must only be called once, on a fresh block.
10081 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10083 m_MemoryTypeIndex = newMemoryTypeIndex;
10085 m_hMemory = newMemory;
10090 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10093 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default: generic best-fit metadata.
10099 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10101 m_pMetadata->Init(newSize);
// VmaDeviceMemoryBlock::Destroy — returns the VkDeviceMemory to the
// allocator and deletes the metadata. The block must be empty: destroying
// it with live suballocations would leak/invalidate them.
10104 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10108 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10110 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10111 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10112 m_hMemory = VK_NULL_HANDLE;
10114 vma_delete(allocator, m_pMetadata);
10115 m_pMetadata = VMA_NULL;
10118 bool VmaDeviceMemoryBlock::Validate()
const 10120 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10121 (m_pMetadata->GetSize() != 0));
10123 return m_pMetadata->Validate();
// VmaDeviceMemoryBlock::CheckCorruption — temporarily maps the whole block
// and asks the metadata to verify the magic-value margins around every
// allocation. NOTE(review): mangled extraction — the early/final `return res;`
// lines are elided artifacts.
10126 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
10128 void* pData =
nullptr;
10129 VkResult res = Map(hAllocator, 1, &pData);
10130 if(res != VK_SUCCESS)
10135 res = m_pMetadata->CheckCorruption(pData);
// Balance the Map() above regardless of the check's outcome.
10137 Unmap(hAllocator, 1);
// VmaDeviceMemoryBlock::Map — reference-counted persistent mapping of the
// whole block. If already mapped, just bumps m_MapCount and returns the
// cached pointer; otherwise calls vkMapMemory once for the entire block.
// NOTE(review): mangled extraction — the count==0 early-out, the remaining
// vkMapMemory arguments and the return statements are elided artifacts.
10142 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// m_MapCount and m_pMappedData are guarded by the block's own mutex.
10149 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10150 if(m_MapCount != 0)
10152 m_MapCount += count;
10153 VMA_ASSERT(m_pMappedData != VMA_NULL);
10154 if(ppData != VMA_NULL)
10156 *ppData = m_pMappedData;
10162 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10163 hAllocator->m_hDevice,
10169 if(result == VK_SUCCESS)
10171 if(ppData != VMA_NULL)
10173 *ppData = m_pMappedData;
10175 m_MapCount = count;
// VmaDeviceMemoryBlock::Unmap — decrements the map reference count by
// `count`; vkUnmapMemory is issued only when the count reaches zero.
// Unmapping more times than mapped is a usage error (assert below).
// NOTE(review): mangled extraction — the count==0 early-out and the else
// branch keyword are elided artifacts.
10181 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10188 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10189 if(m_MapCount >= count)
10191 m_MapCount -= count;
10192 if(m_MapCount == 0)
10194 m_pMappedData = VMA_NULL;
10195 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10200 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// WriteMagicValueAroundAllocation — corruption-detection support: maps the
// block and stamps the magic value into the debug margins immediately before
// and after the allocation. Requires VMA_DEBUG_MARGIN / DETECT_CORRUPTION.
// NOTE(review): mangled extraction — the pData declaration, early return and
// final return lines are elided artifacts.
10204 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10206 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10207 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10210 VkResult res = Map(hAllocator, 1, &pData);
10211 if(res != VK_SUCCESS)
// Margin before the allocation and margin right after its end.
10216 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10217 VmaWriteMagicValue(pData, allocOffset + allocSize);
10219 Unmap(hAllocator, 1);
// ValidateMagicValueAroundAllocation — counterpart of the writer above: maps
// the block and asserts that the magic values before and after the freed
// allocation are intact, reporting out-of-bounds writes by the application.
// NOTE(review): mangled extraction — the pData declaration, early return and
// final return lines are elided artifacts.
10224 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10226 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10227 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10230 VkResult res = Map(hAllocator, 1, &pData);
10231 if(res != VK_SUCCESS)
10236 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10238 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10240 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10242 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10245 Unmap(hAllocator, 1);
// BindBufferMemory / BindImageMemory — bind a buffer or image to this
// block's VkDeviceMemory at the allocation's offset. The mutex serializes
// binding with mapping, as required by some Vulkan implementations.
// NOTE(review): mangled extraction — parameter lines (hBuffer/hImage) and
// the memory-handle argument lines are elided artifacts.
10250 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
// The allocation must be a block suballocation that belongs to this block.
10255 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10256 hAllocation->GetBlock() ==
this);
10258 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10259 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10260 hAllocator->m_hDevice,
10263 hAllocation->GetOffset());
10266 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10271 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10272 hAllocation->GetBlock() ==
this);
10274 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10275 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10276 hAllocator->m_hDevice,
10279 hAllocation->GetOffset());
// Fragments of the static stat-info helpers. NOTE(review): the signature of
// the first helper (presumably the one that zero-initializes a VmaStatInfo)
// and both bodies are largely elided by the extraction — only the memset and
// the VmaPostprocessCalcStatInfo signature survive here.
10284 memset(&outInfo, 0,
sizeof(outInfo));
10303 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T — a custom memory pool; its constructor forwards the pool
// create-info into the embedded VmaBlockVector (member initializer elided by
// the extraction). A zero blockSize means "use the allocator's preferred
// block size", and a non-zero one marks the size as explicit.
10311 VmaPool_T::VmaPool_T(
10314 VkDeviceSize preferredBlockSize) :
10317 createInfo.memoryTypeIndex,
10318 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10319 createInfo.minBlockCount,
10320 createInfo.maxBlockCount,
10322 createInfo.frameInUseCount,
10324 createInfo.blockSize != 0,
10330 VmaPool_T::~VmaPool_T()
// VmaBlockVector — a growable set of VmaDeviceMemoryBlocks of one memory
// type; this constructor just copies configuration into members. The blocks
// vector uses the allocator's allocation callbacks via VmaStlAllocator.
// NOTE(review): mangled extraction — the hAllocator/isCustomPool parameter
// lines and the trailing initializer entries are elided artifacts.
10334 #if VMA_STATS_STRING_ENABLED 10336 #endif // #if VMA_STATS_STRING_ENABLED 10338 VmaBlockVector::VmaBlockVector(
10340 uint32_t memoryTypeIndex,
10341 VkDeviceSize preferredBlockSize,
10342 size_t minBlockCount,
10343 size_t maxBlockCount,
10344 VkDeviceSize bufferImageGranularity,
10345 uint32_t frameInUseCount,
10347 bool explicitBlockSize,
10348 uint32_t algorithm) :
10349 m_hAllocator(hAllocator),
10350 m_MemoryTypeIndex(memoryTypeIndex),
10351 m_PreferredBlockSize(preferredBlockSize),
10352 m_MinBlockCount(minBlockCount),
10353 m_MaxBlockCount(maxBlockCount),
10354 m_BufferImageGranularity(bufferImageGranularity),
10355 m_FrameInUseCount(frameInUseCount),
10356 m_IsCustomPool(isCustomPool),
10357 m_ExplicitBlockSize(explicitBlockSize),
10358 m_Algorithm(algorithm),
10359 m_HasEmptyBlock(false),
10360 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10361 m_pDefragmentator(VMA_NULL),
// ~VmaBlockVector — destroys all owned blocks in reverse order. A live
// defragmentator at this point would be a usage error.
10366 VmaBlockVector::~VmaBlockVector()
10368 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10370 for(
size_t i = m_Blocks.size(); i--; )
10372 m_Blocks[i]->Destroy(m_hAllocator);
10373 vma_delete(m_hAllocator, m_Blocks[i]);
// CreateMinBlocks — eagerly allocates m_MinBlockCount blocks of the
// preferred size, bailing out on the first failure.
// NOTE(review): mangled extraction — the error/success return lines are
// elided artifacts.
10377 VkResult VmaBlockVector::CreateMinBlocks()
10379 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10381 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10382 if(res != VK_SUCCESS)
// GetPoolStats — aggregates statistics across all blocks under the vector's
// mutex. NOTE(review): mangled extraction — the lines zero-initializing
// *pStats are elided artifacts.
10390 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10392 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10394 const size_t blockCount = m_Blocks.size();
10403 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10405 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10406 VMA_ASSERT(pBlock);
10407 VMA_HEAVY_ASSERT(pBlock->Validate());
10408 pBlock->m_pMetadata->AddPoolStats(*pStats);
10412 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10414 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10415 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10416 (VMA_DEBUG_MARGIN > 0) &&
10417 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// VMA_ALLOCATION_TRY_COUNT bounds the retry loop in the can-make-other-lost
// path below, guarding against livelock between competing threads.
10420 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// VmaBlockVector::Allocate — the central suballocation routine: tries the
// existing blocks (last block, then forward or backward scan depending on
// strategy), then creating a new block (with adaptive size back-off), and
// finally the "make other allocations lost" fallback.
// NOTE(review): heavily mangled extraction — many parameter lines, strategy
// flags, AllocateFromBlock argument lists, braces and early returns are
// elided; comments below describe only the visible skeleton.
10422 VkResult VmaBlockVector::Allocate(
10424 uint32_t currentFrameIndex,
10426 VkDeviceSize alignment,
10428 VmaSuballocationType suballocType,
// New blocks may only be created while the configured maximum count has not
// been reached.
10435 const bool canCreateNewBlock =
10437 (m_Blocks.size() < m_MaxBlockCount);
10444 canMakeOtherLost =
false;
// Upper-address allocations are a linear-algorithm-only feature.
10448 if(isUpperAddress &&
10451 return VK_ERROR_FEATURE_NOT_PRESENT;
10465 return VK_ERROR_FEATURE_NOT_PRESENT;
// With debug margins, a request close to the block size can never fit.
10469 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10471 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10474 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10481 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Try the most recently used (last) block first.
10490 if(!m_Blocks.empty())
10492 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10493 VMA_ASSERT(pCurrBlock);
10494 VkResult res = AllocateFromBlock(
10505 if(res == VK_SUCCESS)
10507 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 2a. Forward scan over all blocks (one allocation-strategy branch).
10517 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10519 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10520 VMA_ASSERT(pCurrBlock);
10521 VkResult res = AllocateFromBlock(
10532 if(res == VK_SUCCESS)
10534 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2b. Backward scan (the alternate strategy branch).
10542 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10544 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10545 VMA_ASSERT(pCurrBlock);
10546 VkResult res = AllocateFromBlock(
10557 if(res == VK_SUCCESS)
10559 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 3. Create a new block. When the block size is not explicit, start from the
// preferred size but try up to NEW_BLOCK_SIZE_SHIFT_MAX halvings.
10567 if(canCreateNewBlock)
10570 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10571 uint32_t newBlockSizeShift = 0;
10572 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10574 if(!m_ExplicitBlockSize)
// Pre-shrink: don't jump straight to full size if existing blocks are
// smaller and the request would still fit comfortably (>= 2x size).
10577 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10578 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10580 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10581 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10583 newBlockSize = smallerNewBlockSize;
10584 ++newBlockSizeShift;
10593 size_t newBlockIndex = 0;
10594 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On out-of-memory, retry with progressively halved block sizes as long as
// the request still fits.
10596 if(!m_ExplicitBlockSize)
10598 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10600 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10601 if(smallerNewBlockSize >= size)
10603 newBlockSize = smallerNewBlockSize;
10604 ++newBlockSizeShift;
10605 res = CreateBlock(newBlockSize, &newBlockIndex);
10614 if(res == VK_SUCCESS)
10616 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10617 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10619 res = AllocateFromBlock(
10630 if(res == VK_SUCCESS)
10632 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Allocation from a brand-new block can only fail for size reasons.
10638 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 4. Fallback: make other (lost-able) allocations lost, retrying up to
// VMA_ALLOCATION_TRY_COUNT times since other threads may race us.
10645 if(canMakeOtherLost)
10647 uint32_t tryIndex = 0;
10648 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10650 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10651 VmaAllocationRequest bestRequest = {};
10652 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward strategy: pick the request with the lowest cost (fewest bytes of
// other allocations sacrificed).
10658 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10660 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10661 VMA_ASSERT(pCurrBlock);
10662 VmaAllocationRequest currRequest = {};
10663 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10666 m_BufferImageGranularity,
10675 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10676 if(pBestRequestBlock == VMA_NULL ||
10677 currRequestCost < bestRequestCost)
10679 pBestRequestBlock = pCurrBlock;
10680 bestRequest = currRequest;
10681 bestRequestCost = currRequestCost;
// Cost 0 means nothing is lost — cannot do better, stop searching.
10683 if(bestRequestCost == 0)
// Backward strategy variant of the same search.
10694 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10696 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10697 VMA_ASSERT(pCurrBlock);
10698 VmaAllocationRequest currRequest = {};
10699 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10702 m_BufferImageGranularity,
10711 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10712 if(pBestRequestBlock == VMA_NULL ||
10713 currRequestCost < bestRequestCost ||
10716 pBestRequestBlock = pCurrBlock;
10717 bestRequest = currRequest;
10718 bestRequestCost = currRequestCost;
10720 if(bestRequestCost == 0 ||
// Commit the best candidate found in this try.
10730 if(pBestRequestBlock != VMA_NULL)
10734 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10735 if(res != VK_SUCCESS)
// The request is only valid if the targeted allocations could actually be
// made lost (another thread may have touched them meanwhile).
10741 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10747 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10749 m_HasEmptyBlock =
false;
10752 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10753 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10754 (*pAllocation)->InitBlockAllocation(
10757 bestRequest.offset,
10763 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10764 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10765 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10766 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10768 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10770 if(IsCorruptionDetectionEnabled())
10772 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10773 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted — give up rather than loop forever.
10788 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10790 return VK_ERROR_TOO_MANY_OBJECTS;
10794 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// VmaBlockVector::Free — releases a block suballocation. Under the mutex:
// validates corruption margins, unmaps persistent mappings, frees from the
// metadata, and manages the "keep at most one empty block" policy. The block
// itself is destroyed outside the critical section (pBlockToDelete).
// NOTE(review): mangled extraction — the hAllocation parameter line, scope
// braces and some statements are elided artifacts.
10797 void VmaBlockVector::Free(
10800 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
10804 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10806 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10808 if(IsCorruptionDetectionEnabled())
10810 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10811 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Drop the mapping reference held by a persistently mapped allocation.
10814 if(hAllocation->IsPersistentMap())
10816 pBlock->Unmap(m_hAllocator, 1);
10819 pBlock->m_pMetadata->Free(hAllocation);
10820 VMA_HEAVY_ASSERT(pBlock->Validate());
10822 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Empty-block policy: keep one empty block around as a cache; a second
// empty block (above m_MinBlockCount) is scheduled for destruction.
10825 if(pBlock->m_pMetadata->IsEmpty())
10828 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10830 pBlockToDelete = pBlock;
10836 m_HasEmptyBlock =
true;
// There was already an empty block, and it sorts last — consider deleting it.
10841 else if(m_HasEmptyBlock)
10843 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10844 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10846 pBlockToDelete = pLastBlock;
10847 m_Blocks.pop_back();
10848 m_HasEmptyBlock =
false;
10852 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory happens outside the lock on purpose.
10857 if(pBlockToDelete != VMA_NULL)
10859 VMA_DEBUG_LOG(
" Deleted empty allocation");
10860 pBlockToDelete->Destroy(m_hAllocator);
10861 vma_delete(m_hAllocator, pBlockToDelete);
// CalcMaxBlockSize — largest existing block size, scanning from the back
// (blocks are sorted by free size, larger blocks tend to be last) and
// stopping early once the preferred size is reached.
// NOTE(review): mangled extraction — the early `break` and the final
// `return result;` lines are elided artifacts.
10865 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10867 VkDeviceSize result = 0;
10868 for(
size_t i = m_Blocks.size(); i--; )
10870 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10871 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks by linear search. Does not destroy the block;
// ownership handling is the caller's responsibility.
10879 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10881 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10883 if(m_Blocks[blockIndex] == pBlock)
10885 VmaVectorRemove(m_Blocks, blockIndex);
// One pass of adjacent-swap (bubble-style) ordering: keeps m_Blocks sorted by
// ascending sum of free size, amortizing the sort across calls rather than
// doing a full sort each time.
10892 void VmaBlockVector::IncrementallySortBlocks()
10897 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10899 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10901 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Tries to suballocate from a single given block: builds an allocation
// request via the block's metadata, maps the block if needed, records the
// suballocation, and fills debug patterns / corruption-detection magic
// values. Falls through to VK_ERROR_OUT_OF_DEVICE_MEMORY when the block
// cannot satisfy the request.
// NOTE(review): several parameter/argument lines were lost in extraction.
10908 VkResult VmaBlockVector::AllocateFromBlock(
10909 VmaDeviceMemoryBlock* pBlock,
10911 uint32_t currentFrameIndex,
10913 VkDeviceSize alignment,
10916 VmaSuballocationType suballocType,
10925 VmaAllocationRequest currRequest = {};
10926 if(pBlock->m_pMetadata->CreateAllocationRequest(
10929 m_BufferImageGranularity,
// This path must not require making other allocations lost.
10939 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Reference-counted map; VMA_NULL because the pointer itself is not needed here.
10943 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10944 if(res != VK_SUCCESS)
// The block is about to receive an allocation, so it is no longer the
// cached empty block.
10951 if(pBlock->m_pMetadata->IsEmpty())
10953 m_HasEmptyBlock =
false;
10956 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10957 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10958 (*pAllocation)->InitBlockAllocation(
10961 currRequest.offset,
10967 VMA_HEAVY_ASSERT(pBlock->Validate());
10968 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Optionally fill new memory with a debug pattern.
10969 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10971 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Optionally write magic values on both sides of the allocation so later
// corruption checks can validate them.
10973 if(IsCorruptionDetectionEnabled())
10975 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10976 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10980 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports the new block's index via pNewBlockIndex.
10983 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10985 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10986 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10987 allocInfo.allocationSize = blockSize;
10988 VkDeviceMemory mem = VK_NULL_HANDLE;
10989 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
10998 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11003 allocInfo.allocationSize,
11007 m_Blocks.push_back(pBlock);
11008 if(pNewBlockIndex != VMA_NULL)
11010 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as a JSON object: memory type, block size
// limits and counts, frame-in-use count, algorithm, and a per-block detailed
// map keyed by block id. Pool-owned and default vectors emit slightly
// different fields (visible in the two WriteString branches).
11016 #if VMA_STATS_STRING_ENABLED 11018 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
11020 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11022 json.BeginObject();
11026 json.WriteString(
"MemoryTypeIndex");
11027 json.WriteNumber(m_MemoryTypeIndex);
11029 json.WriteString(
"BlockSize");
11030 json.WriteNumber(m_PreferredBlockSize);
11032 json.WriteString(
"BlockCount");
11033 json.BeginObject(
true);
11034 if(m_MinBlockCount > 0)
11036 json.WriteString(
"Min");
11037 json.WriteNumber((uint64_t)m_MinBlockCount);
11039 if(m_MaxBlockCount < SIZE_MAX)
11041 json.WriteString(
"Max");
11042 json.WriteNumber((uint64_t)m_MaxBlockCount);
11044 json.WriteString(
"Cur");
11045 json.WriteNumber((uint64_t)m_Blocks.size());
11048 if(m_FrameInUseCount > 0)
11050 json.WriteString(
"FrameInUseCount");
11051 json.WriteNumber(m_FrameInUseCount);
11054 if(m_Algorithm != 0)
11056 json.WriteString(
"Algorithm");
11057 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
11062 json.WriteString(
"PreferredBlockSize");
11063 json.WriteNumber(m_PreferredBlockSize);
// Per-block details, keyed by each block's numeric id.
11066 json.WriteString(
"Blocks");
11067 json.BeginObject();
11068 for(
size_t i = 0; i < m_Blocks.size(); ++i)
11070 json.BeginString();
11071 json.ContinueString(m_Blocks[i]->GetId());
11074 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector and returns it;
// subsequent calls return the already-created instance.
11081 #endif // #if VMA_STATS_STRING_ENABLED 11083 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
11085 uint32_t currentFrameIndex)
11087 if(m_pDefragmentator == VMA_NULL)
11089 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
11092 currentFrameIndex);
11095 return m_pDefragmentator;
// Runs the defragmentator under the vector's mutex, accounts moved
// bytes/allocations against the caller's remaining budgets (passed by
// reference), then destroys any blocks that became empty — keeping one empty
// block cached and respecting m_MinBlockCount.
11098 VkResult VmaBlockVector::Defragment(
11100 VkDeviceSize& maxBytesToMove,
11101 uint32_t& maxAllocationsToMove)
11103 if(m_pDefragmentator == VMA_NULL)
11108 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11111 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report progress and shrink the caller's remaining budgets.
11114 if(pDefragmentationStats != VMA_NULL)
11116 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
11117 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
11118 pDefragmentationStats->
bytesMoved += bytesMoved;
11120 VMA_ASSERT(bytesMoved <= maxBytesToMove);
11121 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
11122 maxBytesToMove -= bytesMoved;
11123 maxAllocationsToMove -= allocationsMoved;
// Free empty blocks: iterate backwards so VmaVectorRemove stays valid.
11127 m_HasEmptyBlock =
false;
11128 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11130 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11131 if(pBlock->m_pMetadata->IsEmpty())
11133 if(m_Blocks.size() > m_MinBlockCount)
11135 if(pDefragmentationStats != VMA_NULL)
11138 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
11141 VmaVectorRemove(m_Blocks, blockIndex);
11142 pBlock->Destroy(m_hAllocator);
11143 vma_delete(m_hAllocator, pBlock);
11147 m_HasEmptyBlock =
true;
// Deletes the defragmentator (if any) and resets the pointer so a later
// EnsureDefragmentator call can recreate it.
11155 void VmaBlockVector::DestroyDefragmentator()
11157 if(m_pDefragmentator != VMA_NULL)
11159 vma_delete(m_hAllocator, m_pDefragmentator);
11160 m_pDefragmentator = VMA_NULL;
// Marks lost all allocations in every block that are eligible given the
// current frame index and m_FrameInUseCount, optionally reporting the total
// number of allocations made lost.
11164 void VmaBlockVector::MakePoolAllocationsLost(
11165 uint32_t currentFrameIndex,
11166 size_t* pLostAllocationCount)
11168 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11169 size_t lostAllocationCount = 0;
11170 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11172 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11173 VMA_ASSERT(pBlock);
11174 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
11176 if(pLostAllocationCount != VMA_NULL)
11178 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection magic values in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled for
// this vector; otherwise propagates the first per-block failure.
11182 VkResult VmaBlockVector::CheckCorruption()
11184 if(!IsCorruptionDetectionEnabled())
11186 return VK_ERROR_FEATURE_NOT_PRESENT;
11189 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11190 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11192 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11193 VMA_ASSERT(pBlock);
11194 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11195 if(res != VK_SUCCESS)
// Accumulates per-block statistics into pStats: the global total, the bucket
// for this vector's memory type, and the bucket for its memory heap.
11203 void VmaBlockVector::AddStats(
VmaStats* pStats)
11205 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11206 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11208 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11210 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11212 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11213 VMA_ASSERT(pBlock);
11214 VMA_HEAVY_ASSERT(pBlock->Validate());
11216 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11217 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11218 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11219 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to an allocator and one block vector.
// Defragmentation is only supported for the default algorithm (asserted).
11226 VmaDefragmentator::VmaDefragmentator(
11228 VmaBlockVector* pBlockVector,
11229 uint32_t currentFrameIndex) :
11230 m_hAllocator(hAllocator),
11231 m_pBlockVector(pBlockVector),
11232 m_CurrentFrameIndex(currentFrameIndex),
11234 m_AllocationsMoved(0),
11235 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11236 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11238 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: deletes all owned BlockInfo objects (the vector holds raw
// owning pointers).
11241 VmaDefragmentator::~VmaDefragmentator()
11243 for(
size_t i = m_Blocks.size(); i--; )
11245 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a candidate for moving. pChanged (may be null)
// is set to VK_TRUE later if the allocation actually gets relocated.
11249 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11251 AllocationInfo allocInfo;
11252 allocInfo.m_hAllocation = hAlloc;
11253 allocInfo.m_pChanged = pChanged;
11254 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, mapping it on demand. Reuses
// an existing defragmentation mapping or a persistent mapping if present;
// only maps (and remembers the pointer for later Unmap) as a last resort.
11257 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11260 if(m_pMappedDataForDefragmentation)
11262 *ppMappedData = m_pMappedDataForDefragmentation;
11267 if(m_pBlock->GetMappedData())
11269 *ppMappedData = m_pBlock->GetMappedData();
11274 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11275 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the mapping created by EnsureMapping, if one was made here
// (mappings borrowed from a persistent map are left untouched).
11279 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11281 if(m_pMappedDataForDefragmentation != VMA_NULL)
11283 m_pBlock->Unmap(hAllocator, 1);
// One defragmentation round: walks movable allocations from the last block
// backwards and tries to re-place each into an earlier block (or earlier
// offset) when MoveMakesSense approves. Copies the bytes through mapped
// pointers, updates both blocks' metadata, and stops with VK_INCOMPLETE once
// either budget (maxBytesToMove / maxAllocationsToMove) would be exceeded.
// NOTE(review): extraction dropped brace lines and a few statements; the
// control flow below must be read together with the original file.
11287 VkResult VmaDefragmentator::DefragmentRound(
11288 VkDeviceSize maxBytesToMove,
11289 uint32_t maxAllocationsToMove)
11291 if(m_Blocks.empty())
// Cursor starts at the last allocation of the last block.
11296 size_t srcBlockIndex = m_Blocks.size() - 1;
11297 size_t srcAllocIndex = SIZE_MAX;
// Advance the cursor past empty blocks, terminating when block 0 is reached.
11303 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11305 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11308 if(srcBlockIndex == 0)
11315 srcAllocIndex = SIZE_MAX;
11320 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11324 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11325 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11327 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11328 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11329 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11330 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
11333 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11335 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11336 VmaAllocationRequest dstAllocRequest;
11337 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11338 m_CurrentFrameIndex,
11339 m_pBlockVector->GetFrameInUseCount(),
11340 m_pBlockVector->GetBufferImageGranularity(),
11347 &dstAllocRequest) &&
11349 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11351 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check before committing the move.
11354 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11355 (m_BytesMoved + size > maxBytesToMove))
11357 return VK_INCOMPLETE;
11360 void* pDstMappedData = VMA_NULL;
11361 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11362 if(res != VK_SUCCESS)
11367 void* pSrcMappedData = VMA_NULL;
11368 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11369 if(res != VK_SUCCESS)
// Byte copy of the allocation contents into its new location.
11376 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11377 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11378 static_cast<size_t>(size));
// Re-stamp the debug margins around the new location.
11380 if(VMA_DEBUG_MARGIN > 0)
11382 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11383 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate in destination metadata, free from source, retarget
// the allocation handle to its new block/offset.
11386 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11391 allocInfo.m_hAllocation);
11392 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11394 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11396 if(allocInfo.m_pChanged != VMA_NULL)
11398 *allocInfo.m_pChanged = VK_TRUE;
11401 ++m_AllocationsMoved;
11402 m_BytesMoved += size;
11404 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the cursor to the previous allocation / previous block.
11412 if(srcAllocIndex > 0)
11418 if(srcBlockIndex > 0)
11421 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds BlockInfo wrappers for every block
// in the vector, distributes the registered allocations into their blocks
// (skipping lost allocations), sorts blocks for move-destination preference,
// runs up to two DefragmentRound passes within the given budgets, then
// unmaps everything that was mapped for the copy.
11431 VkResult VmaDefragmentator::Defragment(
11432 VkDeviceSize maxBytesToMove,
11433 uint32_t maxAllocationsToMove)
11435 if(m_Allocations.empty())
// One BlockInfo per device memory block, then sorted by block pointer so
// allocations can be matched to their block via binary search.
11441 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11442 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11444 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11445 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11446 m_Blocks.push_back(pBlockInfo);
11450 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered (non-lost) allocation into its block's list.
11453 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11455 AllocationInfo& allocInfo = m_Allocations[blockIndex];
11457 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11459 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11460 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11461 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11463 (*it)->m_Allocations.push_back(allocInfo);
11471 m_Allocations.clear();
11473 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11475 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11476 pBlockInfo->CalcHasNonMovableAllocations();
11477 pBlockInfo->SortAllocationsBySizeDescecnding();
// Prefer destinations per BlockInfoCompareMoveDestination's ordering.
11481 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Up to two rounds; a round returning non-VK_SUCCESS (e.g. VK_INCOMPLETE)
// ends the loop.
11484 VkResult result = VK_SUCCESS;
11485 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11487 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
11491 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11493 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic: a move is worthwhile only if it lands in an earlier block, or in
// the same block at a lower offset (compacting toward the front).
11499 bool VmaDefragmentator::MoveMakesSense(
11500 size_t dstBlockIndex, VkDeviceSize dstOffset,
11501 size_t srcBlockIndex, VkDeviceSize srcOffset)
11503 if(dstBlockIndex < srcBlockIndex)
11507 if(dstBlockIndex > srcBlockIndex)
11511 if(dstOffset < srcOffset)
// VmaRecorder: CSV call-recording support (Windows-only section; compiled
// only when VMA_RECORDING_ENABLED). Constructor just initializes members.
11521 #if VMA_RECORDING_ENABLED 11523 VmaRecorder::VmaRecorder() :
11528 m_StartCounter(INT64_MAX)
// Headerless body (function signature lost in extraction; presumably
// VmaRecorder initialization taking record settings + useMutex — TODO confirm
// against the original file). Captures the QPC frequency/start counter for
// timestamps, opens the output file for binary write, and emits the two-line
// CSV header identifying the format and its version ("1,4").
11534 m_UseMutex = useMutex;
11535 m_Flags = settings.
flags;
11537 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11538 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// fopen_s returns errno_t; a failure aborts initialization.
11541 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11544 return VK_ERROR_INITIALIZATION_FAILED;
11548 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11549 fprintf(m_File,
"%s\n",
"1,4");
// Destructor: closes the recording file if one was opened (the fclose call
// itself was lost in extraction).
11554 VmaRecorder::~VmaRecorder()
11556 if(m_File != VMA_NULL)
// Appends a vmaCreateAllocator row (thread id, timestamp, frame index) to the
// CSV log under the file mutex.
11562 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11564 CallParams callParams;
11565 GetBasicParams(callParams);
11567 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11568 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a vmaDestroyAllocator row to the CSV log under the file mutex.
11572 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11574 CallParams callParams;
11575 GetBasicParams(callParams);
11577 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11578 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Headerless body (signature lost in extraction; the format string shows it
// logs vmaCreatePool with the pool create-info fields and pool handle).
11584 CallParams callParams;
11585 GetBasicParams(callParams);
11587 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11588 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyPool row (with the pool handle) to the CSV log.
11599 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11601 CallParams callParams;
11602 GetBasicParams(callParams);
11604 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11605 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaAllocateMemory row: memory requirements, create-info fields,
// and the stringified user data.
11610 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11611 const VkMemoryRequirements& vkMemReq,
11615 CallParams callParams;
11616 GetBasicParams(callParams);
11618 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11619 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11620 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11622 vkMemReq.alignment,
11623 vkMemReq.memoryTypeBits,
11631 userDataStr.GetString());
// Appends a vmaAllocateMemoryForBuffer row: requirements, dedicated-
// allocation hints (as 0/1), create-info fields and user data.
11635 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11636 const VkMemoryRequirements& vkMemReq,
11637 bool requiresDedicatedAllocation,
11638 bool prefersDedicatedAllocation,
11642 CallParams callParams;
11643 GetBasicParams(callParams);
11645 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11646 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11647 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11649 vkMemReq.alignment,
11650 vkMemReq.memoryTypeBits,
11651 requiresDedicatedAllocation ? 1 : 0,
11652 prefersDedicatedAllocation ? 1 : 0,
11660 userDataStr.GetString());
// Appends a vmaAllocateMemoryForImage row: requirements, dedicated-allocation
// hints (as 0/1), create-info fields and user data.
11664 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11665 const VkMemoryRequirements& vkMemReq,
11666 bool requiresDedicatedAllocation,
11667 bool prefersDedicatedAllocation,
11671 CallParams callParams;
11672 GetBasicParams(callParams);
11674 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11675 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11676 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11678 vkMemReq.alignment,
11679 vkMemReq.memoryTypeBits,
11680 requiresDedicatedAllocation ? 1 : 0,
11681 prefersDedicatedAllocation ? 1 : 0,
11689 userDataStr.GetString());
// Appends a vmaFreeMemory row (allocation handle) to the CSV log.
11693 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11696 CallParams callParams;
11697 GetBasicParams(callParams);
11699 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11700 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaResizeAllocation row (allocation handle, new size).
11705 void VmaRecorder::RecordResizeAllocation(
11706 uint32_t frameIndex,
11708 VkDeviceSize newSize)
11710 CallParams callParams;
11711 GetBasicParams(callParams);
11713 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11714 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
11715 allocation, newSize);
// Appends a vmaSetAllocationUserData row with the stringified user data.
11719 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11721 const void* pUserData)
11723 CallParams callParams;
11724 GetBasicParams(callParams);
11726 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11727 UserDataString userDataStr(
11730 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11732 userDataStr.GetString());
// Appends a vmaCreateLostAllocation row (allocation handle).
11736 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11739 CallParams callParams;
11740 GetBasicParams(callParams);
11742 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11743 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaMapMemory row (allocation handle).
11748 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11751 CallParams callParams;
11752 GetBasicParams(callParams);
11754 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11755 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaUnmapMemory row (allocation handle).
11760 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11763 CallParams callParams;
11764 GetBasicParams(callParams);
11766 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11767 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaFlushAllocation row (allocation handle, offset, size).
11772 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11773 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11775 CallParams callParams;
11776 GetBasicParams(callParams);
11778 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11779 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaInvalidateAllocation row (allocation handle, offset, size).
11786 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11787 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11789 CallParams callParams;
11790 GetBasicParams(callParams);
11792 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11793 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaCreateBuffer row: buffer create-info, allocation create-info
// fields (flags/usage/pool) and stringified user data.
11800 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11801 const VkBufferCreateInfo& bufCreateInfo,
11805 CallParams callParams;
11806 GetBasicParams(callParams);
11808 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11809 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11810 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11811 bufCreateInfo.flags,
11812 bufCreateInfo.size,
11813 bufCreateInfo.usage,
11814 bufCreateInfo.sharingMode,
11815 allocCreateInfo.
flags,
11816 allocCreateInfo.
usage,
11820 allocCreateInfo.
pool,
11822 userDataStr.GetString());
// Appends a vmaCreateImage row: the full image create-info, allocation
// create-info fields (flags/usage/pool) and stringified user data.
11826 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11827 const VkImageCreateInfo& imageCreateInfo,
11831 CallParams callParams;
11832 GetBasicParams(callParams);
11834 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11835 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11836 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11837 imageCreateInfo.flags,
11838 imageCreateInfo.imageType,
11839 imageCreateInfo.format,
11840 imageCreateInfo.extent.width,
11841 imageCreateInfo.extent.height,
11842 imageCreateInfo.extent.depth,
11843 imageCreateInfo.mipLevels,
11844 imageCreateInfo.arrayLayers,
11845 imageCreateInfo.samples,
11846 imageCreateInfo.tiling,
11847 imageCreateInfo.usage,
11848 imageCreateInfo.sharingMode,
11849 imageCreateInfo.initialLayout,
11850 allocCreateInfo.
flags,
11851 allocCreateInfo.
usage,
11855 allocCreateInfo.
pool,
11857 userDataStr.GetString());
// Appends a vmaDestroyBuffer row (allocation handle).
11861 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11864 CallParams callParams;
11865 GetBasicParams(callParams);
11867 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11868 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyImage row (allocation handle).
11873 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11876 CallParams callParams;
11877 GetBasicParams(callParams);
11879 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11880 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaTouchAllocation row (allocation handle).
11885 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11888 CallParams callParams;
11889 GetBasicParams(callParams);
11891 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11892 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaGetAllocationInfo row (allocation handle).
11897 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11900 CallParams callParams;
11901 GetBasicParams(callParams);
11903 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11904 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaMakePoolAllocationsLost row (pool handle).
11909 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11912 CallParams callParams;
11913 GetBasicParams(callParams);
11915 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11916 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Headerless fragment (presumably the VmaRecorder::UserDataString constructor
// — TODO confirm against the original file): if pUserData is set it is either
// used directly as a C string or formatted as a pointer via sprintf_s.
11923 if(pUserData != VMA_NULL)
11927 m_Str = (
const char*)pUserData;
11931 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" .. "Config,End" section of the recording file:
// physical device identity and limits, all memory heaps and types, whether
// VK_KHR_dedicated_allocation is enabled, and the values of the VMA_DEBUG_*
// and size macros the library was compiled with.
11941 void VmaRecorder::WriteConfiguration(
11942 const VkPhysicalDeviceProperties& devProps,
11943 const VkPhysicalDeviceMemoryProperties& memProps,
11944 bool dedicatedAllocationExtensionEnabled)
11946 fprintf(m_File,
"Config,Begin\n");
11948 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11949 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11950 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11951 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11952 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11953 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11955 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11956 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11957 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Heap table: size and flags per heap.
11959 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11960 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11962 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11963 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Type table: heap index and property flags per memory type.
11965 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11966 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11968 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11969 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11972 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration the replay tool needs to reproduce behavior.
11974 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11975 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11976 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11977 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11978 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11979 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11980 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11981 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11982 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11984 fprintf(m_File,
"Config,End\n");
// Fills the per-call CSV prefix: current thread id plus seconds elapsed since
// the recorder was created, computed from QueryPerformanceCounter relative to
// m_StartCounter and scaled by the cached frequency m_Freq.
11987 void VmaRecorder::GetBasicParams(CallParams& outParams)
11989 outParams.threadId = GetCurrentThreadId();
11991 LARGE_INTEGER counter;
11992 QueryPerformanceCounter(&counter);
11993 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flush(): header only survives here — the body (conditional fflush of
// m_File, per the original file) was lost in extraction.
11996 void VmaRecorder::Flush()
// VmaAllocator_T constructor (its signature line was lost in extraction; the
// text below begins mid member-initializer-list). Validates debug-macro
// combinations, zeroes all tables, imports Vulkan function pointers, queries
// device properties/memory properties, applies optional per-heap size limits,
// creates one default VmaBlockVector and dedicated-allocation list per memory
// type, and optionally initializes call recording.
12004 #endif // #if VMA_RECORDING_ENABLED 12012 m_hDevice(pCreateInfo->device),
12013 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
12014 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
12015 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
12016 m_PreferredLargeHeapBlockSize(0),
12017 m_PhysicalDevice(pCreateInfo->physicalDevice),
12018 m_CurrentFrameIndex(0),
12019 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
12022 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values into the debug margin,
// so the margin must be a multiple of 4.
12025 if(VMA_DEBUG_DETECT_CORRUPTION)
12028 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested but the extension support was
// compiled out — hard error.
12033 #if !(VMA_DEDICATED_ALLOCATION) 12036 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
12040 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
12041 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
12042 memset(&m_MemProps, 0,
sizeof(m_MemProps));
12044 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
12045 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
12047 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
12049 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
12060 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
12061 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity: alignment-related quantities must be powers of two.
12063 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
12064 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
12065 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
12066 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply caller-provided per-heap limits, clamping the reported heap sizes.
12073 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
12075 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
12076 if(limit != VK_WHOLE_SIZE)
12078 m_HeapSizeLimit[heapIndex] = limit;
12079 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
12081 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and one dedicated-allocation list per memory type.
12087 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12089 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
12091 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
12094 preferredBlockSize,
12097 GetBufferImageGranularity(),
12104 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
12111 VkResult res = VK_SUCCESS;
// Optional call recording: only available when compiled with
// VMA_RECORDING_ENABLED; otherwise requesting it is a hard error.
12116 #if VMA_RECORDING_ENABLED 12117 m_pRecorder = vma_new(
this, VmaRecorder)();
12119 if(res != VK_SUCCESS)
12123 m_pRecorder->WriteConfiguration(
12124 m_PhysicalDeviceProperties,
12126 m_UseKhrDedicatedAllocation);
12127 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
12129 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
12130 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: logs allocator destruction to the recorder (when recording is
// compiled in), asserts no pools are still alive, and deletes the per-memory-
// type dedicated-allocation lists and block vectors in reverse order.
12137 VmaAllocator_T::~VmaAllocator_T()
12139 #if VMA_RECORDING_ENABLED 12140 if(m_pRecorder != VMA_NULL)
12142 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
12143 vma_delete(
this, m_pRecorder);
12147 VMA_ASSERT(m_Pools.empty());
12149 for(
size_t i = GetMemoryTypeCount(); i--; )
12151 vma_delete(
this, m_pDedicatedAllocations[i]);
12152 vma_delete(
this, m_pBlockVectors[i]);
12156 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
12158 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12159 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
12160 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
12161 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
12162 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
12163 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
12164 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
12165 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
12166 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
12167 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
12168 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
12169 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
12170 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
12171 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
12172 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
12173 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
12174 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
12175 #if VMA_DEDICATED_ALLOCATION 12176 if(m_UseKhrDedicatedAllocation)
12178 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
12179 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
12180 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
12181 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
12183 #endif // #if VMA_DEDICATED_ALLOCATION 12184 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12186 #define VMA_COPY_IF_NOT_NULL(funcName) \ 12187 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 12189 if(pVulkanFunctions != VMA_NULL)
12191 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
12192 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
12193 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
12194 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
12195 VMA_COPY_IF_NOT_NULL(vkMapMemory);
12196 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
12197 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
12198 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
12199 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
12200 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
12201 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
12202 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
12203 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12204 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12205 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12206 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
12207 #if VMA_DEDICATED_ALLOCATION 12208 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
12209 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
12213 #undef VMA_COPY_IF_NOT_NULL 12217 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12218 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12219 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12220 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12221 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12222 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12223 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12224 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12225 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12226 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12227 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12228 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12229 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12230 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12231 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12232 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
12233 #if VMA_DEDICATED_ALLOCATION 12234 if(m_UseKhrDedicatedAllocation)
12236 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12237 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12242 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12244 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12245 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12246 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12247 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from memory type memTypeIndex: first tries to suballocate
// from the per-type block vector, then falls back to a dedicated allocation
// via AllocateDedicatedMemory(). Very large requests (> half the preferred
// block size) or explicit requests go straight to dedicated memory.
// NOTE(review): several original lines (parameter entries, condition heads,
// call arguments) are missing from this extraction; code kept verbatim.
12250 VkResult VmaAllocator_T::AllocateMemoryOfType(
12252 VkDeviceSize alignment,
12253 bool dedicatedAllocation,
12254 VkBuffer dedicatedBuffer,
12255 VkImage dedicatedImage,
12257 uint32_t memTypeIndex,
12258 VmaSuballocationType suballocType,
12261 VMA_ASSERT(pAllocation != VMA_NULL);
12262 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Checks HOST_VISIBLE flag of the chosen memory type — presumably guarding a
// mapped-allocation request on non-host-visible memory (condition head not
// captured; confirm against full source).
12268 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12273 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12274 VMA_ASSERT(blockVector);
12276 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Prefer dedicated memory when forced by debug setting, requested explicitly,
// or when the request exceeds half of the preferred block size.
12277 bool preferDedicatedMemory =
12278 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12279 dedicatedAllocation ||
12281 size > preferredBlockSize / 2;
// Dedicated path is only taken for the default (non-pool) allocator.
12283 if(preferDedicatedMemory &&
12285 finalCreateInfo.
pool == VK_NULL_HANDLE)
12294 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12298 return AllocateDedicatedMemory(
// Primary path: suballocate from the block vector.
12312 VkResult res = blockVector->Allocate(
12314 m_CurrentFrameIndex.load(),
12320 if(res == VK_SUCCESS)
12328 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block-vector allocation failed: fall back to a dedicated allocation.
12332 res = AllocateDedicatedMemory(
12338 finalCreateInfo.pUserData,
12342 if(res == VK_SUCCESS)
12345 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
// Both paths failed.
12351 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
12358 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12360 VmaSuballocationType suballocType,
12361 uint32_t memTypeIndex,
12363 bool isUserDataString,
12365 VkBuffer dedicatedBuffer,
12366 VkImage dedicatedImage,
12369 VMA_ASSERT(pAllocation);
12371 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12372 allocInfo.memoryTypeIndex = memTypeIndex;
12373 allocInfo.allocationSize = size;
12375 #if VMA_DEDICATED_ALLOCATION 12376 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12377 if(m_UseKhrDedicatedAllocation)
12379 if(dedicatedBuffer != VK_NULL_HANDLE)
12381 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12382 dedicatedAllocInfo.buffer = dedicatedBuffer;
12383 allocInfo.pNext = &dedicatedAllocInfo;
12385 else if(dedicatedImage != VK_NULL_HANDLE)
12387 dedicatedAllocInfo.image = dedicatedImage;
12388 allocInfo.pNext = &dedicatedAllocInfo;
12391 #endif // #if VMA_DEDICATED_ALLOCATION 12394 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12395 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12398 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
12402 void* pMappedData = VMA_NULL;
12405 res = (*m_VulkanFunctions.vkMapMemory)(
12414 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12415 FreeVulkanMemory(memTypeIndex, size, hMemory);
12420 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12421 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12422 (*pAllocation)->SetUserData(
this, pUserData);
12423 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12425 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12430 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12431 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12432 VMA_ASSERT(pDedicatedAllocations);
12433 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12436 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
12441 void VmaAllocator_T::GetBufferMemoryRequirements(
12443 VkMemoryRequirements& memReq,
12444 bool& requiresDedicatedAllocation,
12445 bool& prefersDedicatedAllocation)
const 12447 #if VMA_DEDICATED_ALLOCATION 12448 if(m_UseKhrDedicatedAllocation)
12450 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12451 memReqInfo.buffer = hBuffer;
12453 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12455 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12456 memReq2.pNext = &memDedicatedReq;
12458 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12460 memReq = memReq2.memoryRequirements;
12461 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12462 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
12465 #endif // #if VMA_DEDICATED_ALLOCATION 12467 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12468 requiresDedicatedAllocation =
false;
12469 prefersDedicatedAllocation =
false;
12473 void VmaAllocator_T::GetImageMemoryRequirements(
12475 VkMemoryRequirements& memReq,
12476 bool& requiresDedicatedAllocation,
12477 bool& prefersDedicatedAllocation)
const 12479 #if VMA_DEDICATED_ALLOCATION 12480 if(m_UseKhrDedicatedAllocation)
12482 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12483 memReqInfo.image = hImage;
12485 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12487 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12488 memReq2.pNext = &memDedicatedReq;
12490 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12492 memReq = memReq2.memoryRequirements;
12493 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12494 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
12497 #endif // #if VMA_DEDICATED_ALLOCATION 12499 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12500 requiresDedicatedAllocation =
false;
12501 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates flag combinations, dispatches
// pool allocations to the pool's block vector, and otherwise walks candidate
// memory types (masking out each failed type from memoryTypeBits) calling
// AllocateMemoryOfType until one succeeds.
// NOTE(review): several original lines (createInfo parameter, flag-test
// condition heads, FindMemoryTypeIndex calls, argument lists) are missing
// from this extraction; code kept verbatim.
12505 VkResult VmaAllocator_T::AllocateMemory(
12506 const VkMemoryRequirements& vkMemReq,
12507 bool requiresDedicatedAllocation,
12508 bool prefersDedicatedAllocation,
12509 VkBuffer dedicatedBuffer,
12510 VkImage dedicatedImage,
12512 VmaSuballocationType suballocType,
12515 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Zero-size allocations are rejected outright.
12517 if(vkMemReq.size == 0)
12519 return VK_ERROR_VALIDATION_FAILED_EXT;
// Flag-combination validation (condition heads not captured).
12524 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12525 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12530 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12531 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation is incompatible with never-allocate and
// with custom pools.
12533 if(requiresDedicatedAllocation)
12537 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12538 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12540 if(createInfo.
pool != VK_NULL_HANDLE)
12542 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12543 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12546 if((createInfo.
pool != VK_NULL_HANDLE) &&
12549 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12550 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool: delegate directly to the pool's block vector, honouring the
// memory type's minimum alignment.
12553 if(createInfo.
pool != VK_NULL_HANDLE)
12555 const VkDeviceSize alignmentForPool = VMA_MAX(
12556 vkMemReq.alignment,
12557 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12558 return createInfo.
pool->m_BlockVector.Allocate(
12560 m_CurrentFrameIndex.load(),
// Default pools: iterate over acceptable memory types.
12570 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12571 uint32_t memTypeIndex = UINT32_MAX;
12573 if(res == VK_SUCCESS)
12575 VkDeviceSize alignmentForMemType = VMA_MAX(
12576 vkMemReq.alignment,
12577 GetMemoryTypeMinAlignment(memTypeIndex));
12579 res = AllocateMemoryOfType(
12581 alignmentForMemType,
12582 requiresDedicatedAllocation || prefersDedicatedAllocation,
12590 if(res == VK_SUCCESS)
// Allocation from this memory type failed: remove it from candidates and
// try the next best type.
12600 memoryTypeBits &= ~(1u << memTypeIndex);
12603 if(res == VK_SUCCESS)
12605 alignmentForMemType = VMA_MAX(
12606 vkMemReq.alignment,
12607 GetMemoryTypeMinAlignment(memTypeIndex));
12609 res = AllocateMemoryOfType(
12611 alignmentForMemType,
12612 requiresDedicatedAllocation || prefersDedicatedAllocation,
12620 if(res == VK_SUCCESS)
// No remaining memory type could satisfy the request.
12630 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12641 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12643 VMA_ASSERT(allocation);
12645 if(TouchAllocation(allocation))
12647 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12649 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12652 switch(allocation->GetType())
12654 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12656 VmaBlockVector* pBlockVector = VMA_NULL;
12657 VmaPool hPool = allocation->GetPool();
12658 if(hPool != VK_NULL_HANDLE)
12660 pBlockVector = &hPool->m_BlockVector;
12664 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12665 pBlockVector = m_pBlockVectors[memTypeIndex];
12667 pBlockVector->Free(allocation);
12670 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12671 FreeDedicatedMemory(allocation);
12678 allocation->SetUserData(
this, VMA_NULL);
12679 vma_delete(
this, allocation);
12682 VkResult VmaAllocator_T::ResizeAllocation(
12684 VkDeviceSize newSize)
12686 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
12688 return VK_ERROR_VALIDATION_FAILED_EXT;
12690 if(newSize == alloc->GetSize())
12695 switch(alloc->GetType())
12697 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12698 return VK_ERROR_FEATURE_NOT_PRESENT;
12699 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12700 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
12702 alloc->ChangeSize(newSize);
12703 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
12708 return VK_ERROR_OUT_OF_POOL_MEMORY;
12712 return VK_ERROR_VALIDATION_FAILED_EXT;
12716 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
12719 InitStatInfo(pStats->
total);
12720 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12722 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
12726 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12728 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12729 VMA_ASSERT(pBlockVector);
12730 pBlockVector->AddStats(pStats);
12735 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12736 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12738 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
12743 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12745 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12746 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12747 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12748 VMA_ASSERT(pDedicatedAllocVector);
12749 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12752 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12753 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12754 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12755 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
12760 VmaPostprocessCalcStatInfo(pStats->
total);
12761 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12762 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12763 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12764 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD: 4098 == 0x1002. (Its use is not visible in this
// chunk; presumably consulted by GPU-vendor-specific logic — confirm.)
12767 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Legacy defragmentation entry point. Registers each eligible allocation
// (block-type, HOST_VISIBLE|HOST_COHERENT memory, not lost) with a
// VmaDefragmentator on its block vector, runs Defragment() on every
// host-visible default pool and every custom pool, then destroys all
// defragmentators.
// NOTE(review): several original lines (parameter entries, hAlloc
// initialization, pDefragmentationInfo field reads) are missing from this
// extraction; code kept verbatim.
12769 VkResult VmaAllocator_T::Defragment(
12771 size_t allocationCount,
12772 VkBool32* pAllocationsChanged,
// Clear the caller-visible output arrays/struct up front.
12776 if(pAllocationsChanged != VMA_NULL)
12778 memset(pAllocationsChanged, 0, allocationCount *
sizeof(VkBool32));
12780 if(pDefragmentationStats != VMA_NULL)
12782 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12785 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Pools mutex is held for the entire operation.
12787 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12789 const size_t poolCount = m_Pools.size();
// Phase 1: register eligible allocations with their block vector's
// defragmentator.
12792 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12795 VMA_ASSERT(hAlloc);
12796 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only HOST_VISIBLE and HOST_COHERENT memory can be defragmented here.
12798 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12799 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12801 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12803 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12805 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12807 const VmaPool hAllocPool = hAlloc->GetPool();
12809 if(hAllocPool != VK_NULL_HANDLE)
// Only pools with the default algorithm support defragmentation.
12812 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12814 pAllocBlockVector = &hAllocPool->m_BlockVector;
12820 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12823 if(pAllocBlockVector != VMA_NULL)
12825 VmaDefragmentator*
const pDefragmentator =
12826 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12827 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12828 &pAllocationsChanged[allocIndex] : VMA_NULL;
12829 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation, bounded by optional limits from
// pDefragmentationInfo (defaults: unlimited).
12834 VkResult result = VK_SUCCESS;
12838 VkDeviceSize maxBytesToMove = SIZE_MAX;
12839 uint32_t maxAllocationsToMove = UINT32_MAX;
12840 if(pDefragmentationInfo != VMA_NULL)
12847 for(uint32_t memTypeIndex = 0;
12848 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12852 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12854 result = m_pBlockVectors[memTypeIndex]->Defragment(
12855 pDefragmentationStats,
12857 maxAllocationsToMove);
12862 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12864 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12865 pDefragmentationStats,
12867 maxAllocationsToMove);
// Phase 3: destroy all defragmentators, in reverse order.
12873 for(
size_t poolIndex = poolCount; poolIndex--; )
12875 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12879 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12881 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12883 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of what appears to be VmaAllocator_T::GetAllocationInfo (the signature
// lines were not captured in this extraction — confirm against full source).
// Fills pAllocationInfo from the allocation. For allocations that can become
// lost, it runs a compare-exchange loop on the last-use frame index: a lost
// allocation reports null memory/offset, a live one reports real parameters
// and is "touched" (bumped to the current frame) so it does not become lost.
12892 if(hAllocation->CanBecomeLost())
12898 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12899 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report empty parameters.
12902 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12906 pAllocationInfo->
offset = 0;
12907 pAllocationInfo->
size = hAllocation->GetSize();
12909 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters.
12912 else if(localLastUseFrameIndex == localCurrFrameIndex)
12914 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12915 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12916 pAllocationInfo->
offset = hAllocation->GetOffset();
12917 pAllocationInfo->
size = hAllocation->GetSize();
12919 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index atomically and retry.
12924 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12926 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: last-use index is only maintained for the
// statistics string, then real parameters are reported unconditionally.
12933 #if VMA_STATS_STRING_ENABLED 12934 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12935 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12938 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12939 if(localLastUseFrameIndex == localCurrFrameIndex)
12945 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12947 localLastUseFrameIndex = localCurrFrameIndex;
12953 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12954 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12955 pAllocationInfo->
offset = hAllocation->GetOffset();
12956 pAllocationInfo->
size = hAllocation->GetSize();
12957 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12958 pAllocationInfo->
pUserData = hAllocation->GetUserData();
12962 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12965 if(hAllocation->CanBecomeLost())
12967 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12968 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12971 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12975 else if(localLastUseFrameIndex == localCurrFrameIndex)
12981 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12983 localLastUseFrameIndex = localCurrFrameIndex;
12990 #if VMA_STATS_STRING_ENABLED 12991 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12992 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12995 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12996 if(localLastUseFrameIndex == localCurrFrameIndex)
13002 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
13004 localLastUseFrameIndex = localCurrFrameIndex;
// Interior of VmaAllocator_T::CreatePool (the signature and the
// newCreateInfo validation lines were not captured in this extraction).
// Creates a VmaPool_T with the preferred block size for its memory type,
// pre-creates its minimum blocks, assigns a unique id, and registers the
// pool in the sorted m_Pools vector.
13016 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Validation of the (adjusted) create info failed.
13026 return VK_ERROR_INITIALIZATION_FAILED;
13029 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
13031 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-create minBlockCount blocks; roll back the pool object on failure.
13033 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
13034 if(res != VK_SUCCESS)
13036 vma_delete(
this, *pPool);
// Register the new pool under the pools mutex.
13043 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13044 (*pPool)->SetId(m_NextPoolId++);
13045 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
13051 void VmaAllocator_T::DestroyPool(
VmaPool pool)
13055 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13056 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
13057 VMA_ASSERT(success &&
"Pool not found in Allocator.");
13060 vma_delete(
this, pool);
// Body of what appears to be VmaAllocator_T::GetPoolStats (signature not
// captured in this extraction — confirm): delegates statistics gathering to
// the pool's block vector.
13065 pool->m_BlockVector.GetPoolStats(pPoolStats);
13068 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
13070 m_CurrentFrameIndex.store(frameIndex);
13073 void VmaAllocator_T::MakePoolAllocationsLost(
13075 size_t* pLostAllocationCount)
13077 hPool->m_BlockVector.MakePoolAllocationsLost(
13078 m_CurrentFrameIndex.load(),
13079 pLostAllocationCount);
13082 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
13084 return hPool->m_BlockVector.CheckCorruption();
13087 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
13089 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
13092 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13094 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
13096 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
13097 VMA_ASSERT(pBlockVector);
13098 VkResult localRes = pBlockVector->CheckCorruption();
13101 case VK_ERROR_FEATURE_NOT_PRESENT:
13104 finalRes = VK_SUCCESS;
13114 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13115 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
13117 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
13119 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
13122 case VK_ERROR_FEATURE_NOT_PRESENT:
13125 finalRes = VK_SUCCESS;
13137 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
13139 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
13140 (*pAllocation)->InitLost();
13143 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
13145 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
13148 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13150 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13151 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
13153 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13154 if(res == VK_SUCCESS)
13156 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
13161 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
13166 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13169 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
13171 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
13177 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
13179 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
13181 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
13184 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
13186 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
13187 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13189 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13190 m_HeapSizeLimit[heapIndex] += size;
13194 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
13196 if(hAllocation->CanBecomeLost())
13198 return VK_ERROR_MEMORY_MAP_FAILED;
13201 switch(hAllocation->GetType())
13203 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13205 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13206 char *pBytes = VMA_NULL;
13207 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
13208 if(res == VK_SUCCESS)
13210 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
13211 hAllocation->BlockAllocMap();
13215 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13216 return hAllocation->DedicatedAllocMap(
this, ppData);
13219 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of what appears to be VmaAllocator_T::Unmap (signature not captured in
// this extraction — confirm). Reverses Map(): decrements the allocation's map
// counter and unmaps the owning block (reference counted) for block
// suballocations, or unmaps the dedicated memory directly.
13225 switch(hAllocation->GetType())
13227 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13229 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13230 hAllocation->BlockAllocUnmap();
13231 pBlock->Unmap(
this, 1);
13234 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13235 hAllocation->DedicatedAllocUnmap(
this);
13242 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13244 VkResult res = VK_SUCCESS;
13245 switch(hAllocation->GetType())
13247 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13248 res = GetVulkanFunctions().vkBindBufferMemory(
13251 hAllocation->GetMemory(),
13254 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13256 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13257 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13258 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
13267 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13269 VkResult res = VK_SUCCESS;
13270 switch(hAllocation->GetType())
13272 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13273 res = GetVulkanFunctions().vkBindImageMemory(
13276 hAllocation->GetMemory(),
13279 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13281 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13282 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13283 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a (sub)range of the allocation on non-coherent
// memory types. The range is expanded to nonCoherentAtomSize boundaries as
// the Vulkan spec requires for VkMappedMemoryRange, and for block
// suballocations it is additionally translated by the allocation's offset
// within the block and clamped to the block size. Coherent memory types and
// size == 0 are no-ops.
// NOTE(review): some original lines (the hAllocation parameter entry,
// assertions, and switch(op) framing) are missing from this extraction; code
// kept verbatim.
13292 void VmaAllocator_T::FlushOrInvalidateAllocation(
13294 VkDeviceSize offset, VkDeviceSize size,
13295 VMA_CACHE_OPERATION op)
13297 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13298 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13300 const VkDeviceSize allocationSize = hAllocation->GetSize();
13301 VMA_ASSERT(offset <= allocationSize);
13303 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13305 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13306 memRange.memory = hAllocation->GetMemory();
13308 switch(hAllocation->GetType())
// Dedicated allocation: align the range within the allocation itself.
13310 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13311 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13312 if(size == VK_WHOLE_SIZE)
13314 memRange.size = allocationSize - memRange.offset;
13318 VMA_ASSERT(offset + size <= allocationSize);
13319 memRange.size = VMA_MIN(
13320 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13321 allocationSize - memRange.offset);
// Block suballocation: align within the allocation, then translate by the
// allocation's offset inside the block and clamp to the block size.
13325 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13328 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13329 if(size == VK_WHOLE_SIZE)
13331 size = allocationSize - offset;
13335 VMA_ASSERT(offset + size <= allocationSize);
13337 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// Allocation offset inside the block is itself atom-aligned.
13340 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13341 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13342 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13343 memRange.offset += allocationOffset;
13344 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Perform the requested cache operation on the computed range.
13355 case VMA_CACHE_FLUSH:
13356 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13358 case VMA_CACHE_INVALIDATE:
13359 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
13368 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13370 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13372 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13374 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13375 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13376 VMA_ASSERT(pDedicatedAllocations);
13377 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13378 VMA_ASSERT(success);
13381 VkDeviceMemory hMemory = allocation->GetMemory();
13393 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13395 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
13398 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13400 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13401 !hAllocation->CanBecomeLost() &&
13402 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13404 void* pData = VMA_NULL;
13405 VkResult res = Map(hAllocation, &pData);
13406 if(res == VK_SUCCESS)
13408 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
13409 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13410 Unmap(hAllocation);
13414 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the detailed JSON memory map: one "DedicatedAllocations" object with
// a "Type N" array per non-empty memory type, one "DefaultPools" object with
// a "Type N" entry per non-empty default block vector, and one "Pools" object
// keyed by pool id for custom pools.
// NOTE(review): several original lines (json.EndString/BeginArray/EndObject
// calls and hAlloc initialization) are missing from this extraction; code
// kept verbatim.
13419 #if VMA_STATS_STRING_ENABLED 13421 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13423 bool dedicatedAllocationsStarted =
false;
13424 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13426 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13427 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13428 VMA_ASSERT(pDedicatedAllocVector);
13429 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object lazily, on first non-empty type.
13431 if(dedicatedAllocationsStarted ==
false)
13433 dedicatedAllocationsStarted =
true;
13434 json.WriteString(
"DedicatedAllocations");
13435 json.BeginObject();
13438 json.BeginString(
"Type ");
13439 json.ContinueString(memTypeIndex);
13444 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13446 json.BeginObject(
true);
13448 hAlloc->PrintParameters(json);
13455 if(dedicatedAllocationsStarted)
// Default per-memory-type block vectors, opened lazily the same way.
13461 bool allocationsStarted =
false;
13462 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13464 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13466 if(allocationsStarted ==
false)
13468 allocationsStarted =
true;
13469 json.WriteString(
"DefaultPools");
13470 json.BeginObject();
13473 json.BeginString(
"Type ");
13474 json.ContinueString(memTypeIndex);
13477 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13480 if(allocationsStarted)
// Custom pools, keyed by pool id, under the pools mutex.
13488 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13489 const size_t poolCount = m_Pools.size();
13492 json.WriteString(
"Pools");
13493 json.BeginObject();
13494 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13496 json.BeginString();
13497 json.ContinueString(m_Pools[poolIndex]->GetId());
13500 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
13507 #endif // #if VMA_STATS_STRING_ENABLED 13516 VMA_ASSERT(pCreateInfo && pAllocator);
13517 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13519 return (*pAllocator)->Init(pCreateInfo);
13525 if(allocator != VK_NULL_HANDLE)
13527 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13528 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13529 vma_delete(&allocationCallbacks, allocator);
13535 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13537 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13538 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13543 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13545 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13546 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13551 uint32_t memoryTypeIndex,
13552 VkMemoryPropertyFlags* pFlags)
13554 VMA_ASSERT(allocator && pFlags);
13555 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13556 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13561 uint32_t frameIndex)
13563 VMA_ASSERT(allocator);
13564 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13566 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13568 allocator->SetCurrentFrameIndex(frameIndex);
13575 VMA_ASSERT(allocator && pStats);
13576 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13577 allocator->CalculateStats(pStats);
13580 #if VMA_STATS_STRING_ENABLED 13584 char** ppStatsString,
13585 VkBool32 detailedMap)
13587 VMA_ASSERT(allocator && ppStatsString);
13588 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13590 VmaStringBuilder sb(allocator);
13592 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13593 json.BeginObject();
13596 allocator->CalculateStats(&stats);
13598 json.WriteString(
"Total");
13599 VmaPrintStatInfo(json, stats.
total);
13601 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13603 json.BeginString(
"Heap ");
13604 json.ContinueString(heapIndex);
13606 json.BeginObject();
13608 json.WriteString(
"Size");
13609 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13611 json.WriteString(
"Flags");
13612 json.BeginArray(
true);
13613 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13615 json.WriteString(
"DEVICE_LOCAL");
13621 json.WriteString(
"Stats");
13622 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
13625 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13627 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13629 json.BeginString(
"Type ");
13630 json.ContinueString(typeIndex);
13633 json.BeginObject();
13635 json.WriteString(
"Flags");
13636 json.BeginArray(
true);
13637 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13638 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13640 json.WriteString(
"DEVICE_LOCAL");
13642 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13644 json.WriteString(
"HOST_VISIBLE");
13646 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13648 json.WriteString(
"HOST_COHERENT");
13650 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13652 json.WriteString(
"HOST_CACHED");
13654 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13656 json.WriteString(
"LAZILY_ALLOCATED");
13662 json.WriteString(
"Stats");
13663 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13672 if(detailedMap == VK_TRUE)
13674 allocator->PrintDetailedMap(json);
13680 const size_t len = sb.GetLength();
13681 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13684 memcpy(pChars, sb.GetData(), len);
13686 pChars[len] =
'\0';
13687 *ppStatsString = pChars;
13692 char* pStatsString)
13694 if(pStatsString != VMA_NULL)
13696 VMA_ASSERT(allocator);
13697 size_t len = strlen(pStatsString);
13698 vma_delete_array(allocator, pStatsString, len + 1);
13702 #endif // #if VMA_STATS_STRING_ENABLED 13709 uint32_t memoryTypeBits,
13711 uint32_t* pMemoryTypeIndex)
13713 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13714 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13715 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13722 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13723 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13728 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13732 switch(pAllocationCreateInfo->
usage)
13737 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13739 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13743 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13746 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13747 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13749 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13753 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13754 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13760 *pMemoryTypeIndex = UINT32_MAX;
13761 uint32_t minCost = UINT32_MAX;
13762 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13763 memTypeIndex < allocator->GetMemoryTypeCount();
13764 ++memTypeIndex, memTypeBit <<= 1)
13767 if((memTypeBit & memoryTypeBits) != 0)
13769 const VkMemoryPropertyFlags currFlags =
13770 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13772 if((requiredFlags & ~currFlags) == 0)
13775 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13777 if(currCost < minCost)
13779 *pMemoryTypeIndex = memTypeIndex;
13784 minCost = currCost;
13789 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13794 const VkBufferCreateInfo* pBufferCreateInfo,
13796 uint32_t* pMemoryTypeIndex)
13798 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13799 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13800 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13801 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13803 const VkDevice hDev = allocator->m_hDevice;
13804 VkBuffer hBuffer = VK_NULL_HANDLE;
13805 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13806 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13807 if(res == VK_SUCCESS)
13809 VkMemoryRequirements memReq = {};
13810 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13811 hDev, hBuffer, &memReq);
13815 memReq.memoryTypeBits,
13816 pAllocationCreateInfo,
13819 allocator->GetVulkanFunctions().vkDestroyBuffer(
13820 hDev, hBuffer, allocator->GetAllocationCallbacks());
13827 const VkImageCreateInfo* pImageCreateInfo,
13829 uint32_t* pMemoryTypeIndex)
13831 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13832 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13833 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13834 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13836 const VkDevice hDev = allocator->m_hDevice;
13837 VkImage hImage = VK_NULL_HANDLE;
13838 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13839 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13840 if(res == VK_SUCCESS)
13842 VkMemoryRequirements memReq = {};
13843 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13844 hDev, hImage, &memReq);
13848 memReq.memoryTypeBits,
13849 pAllocationCreateInfo,
13852 allocator->GetVulkanFunctions().vkDestroyImage(
13853 hDev, hImage, allocator->GetAllocationCallbacks());
13863 VMA_ASSERT(allocator && pCreateInfo && pPool);
13865 VMA_DEBUG_LOG(
"vmaCreatePool");
13867 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13869 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13871 #if VMA_RECORDING_ENABLED 13872 if(allocator->GetRecorder() != VMA_NULL)
13874 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13885 VMA_ASSERT(allocator);
13887 if(pool == VK_NULL_HANDLE)
13892 VMA_DEBUG_LOG(
"vmaDestroyPool");
13894 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13896 #if VMA_RECORDING_ENABLED 13897 if(allocator->GetRecorder() != VMA_NULL)
13899 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13903 allocator->DestroyPool(pool);
13911 VMA_ASSERT(allocator && pool && pPoolStats);
13913 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13915 allocator->GetPoolStats(pool, pPoolStats);
13921 size_t* pLostAllocationCount)
13923 VMA_ASSERT(allocator && pool);
13925 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13927 #if VMA_RECORDING_ENABLED 13928 if(allocator->GetRecorder() != VMA_NULL)
13930 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13934 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13939 VMA_ASSERT(allocator && pool);
13941 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13943 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13945 return allocator->CheckPoolCorruption(pool);
13950 const VkMemoryRequirements* pVkMemoryRequirements,
13955 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13957 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13959 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13961 VkResult result = allocator->AllocateMemory(
13962 *pVkMemoryRequirements,
13968 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13971 #if VMA_RECORDING_ENABLED 13972 if(allocator->GetRecorder() != VMA_NULL)
13974 allocator->GetRecorder()->RecordAllocateMemory(
13975 allocator->GetCurrentFrameIndex(),
13976 *pVkMemoryRequirements,
13982 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13984 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13997 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13999 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
14001 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14003 VkMemoryRequirements vkMemReq = {};
14004 bool requiresDedicatedAllocation =
false;
14005 bool prefersDedicatedAllocation =
false;
14006 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
14007 requiresDedicatedAllocation,
14008 prefersDedicatedAllocation);
14010 VkResult result = allocator->AllocateMemory(
14012 requiresDedicatedAllocation,
14013 prefersDedicatedAllocation,
14017 VMA_SUBALLOCATION_TYPE_BUFFER,
14020 #if VMA_RECORDING_ENABLED 14021 if(allocator->GetRecorder() != VMA_NULL)
14023 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
14024 allocator->GetCurrentFrameIndex(),
14026 requiresDedicatedAllocation,
14027 prefersDedicatedAllocation,
14033 if(pAllocationInfo && result == VK_SUCCESS)
14035 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14048 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
14050 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
14052 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14054 VkMemoryRequirements vkMemReq = {};
14055 bool requiresDedicatedAllocation =
false;
14056 bool prefersDedicatedAllocation =
false;
14057 allocator->GetImageMemoryRequirements(image, vkMemReq,
14058 requiresDedicatedAllocation, prefersDedicatedAllocation);
14060 VkResult result = allocator->AllocateMemory(
14062 requiresDedicatedAllocation,
14063 prefersDedicatedAllocation,
14067 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
14070 #if VMA_RECORDING_ENABLED 14071 if(allocator->GetRecorder() != VMA_NULL)
14073 allocator->GetRecorder()->RecordAllocateMemoryForImage(
14074 allocator->GetCurrentFrameIndex(),
14076 requiresDedicatedAllocation,
14077 prefersDedicatedAllocation,
14083 if(pAllocationInfo && result == VK_SUCCESS)
14085 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14095 VMA_ASSERT(allocator);
14097 if(allocation == VK_NULL_HANDLE)
14102 VMA_DEBUG_LOG(
"vmaFreeMemory");
14104 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14106 #if VMA_RECORDING_ENABLED 14107 if(allocator->GetRecorder() != VMA_NULL)
14109 allocator->GetRecorder()->RecordFreeMemory(
14110 allocator->GetCurrentFrameIndex(),
14115 allocator->FreeMemory(allocation);
14121 VkDeviceSize newSize)
14123 VMA_ASSERT(allocator && allocation);
14125 VMA_DEBUG_LOG(
"vmaResizeAllocation");
14127 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14129 #if VMA_RECORDING_ENABLED 14130 if(allocator->GetRecorder() != VMA_NULL)
14132 allocator->GetRecorder()->RecordResizeAllocation(
14133 allocator->GetCurrentFrameIndex(),
14139 return allocator->ResizeAllocation(allocation, newSize);
14147 VMA_ASSERT(allocator && allocation && pAllocationInfo);
14149 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14151 #if VMA_RECORDING_ENABLED 14152 if(allocator->GetRecorder() != VMA_NULL)
14154 allocator->GetRecorder()->RecordGetAllocationInfo(
14155 allocator->GetCurrentFrameIndex(),
14160 allocator->GetAllocationInfo(allocation, pAllocationInfo);
14167 VMA_ASSERT(allocator && allocation);
14169 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14171 #if VMA_RECORDING_ENABLED 14172 if(allocator->GetRecorder() != VMA_NULL)
14174 allocator->GetRecorder()->RecordTouchAllocation(
14175 allocator->GetCurrentFrameIndex(),
14180 return allocator->TouchAllocation(allocation);
14188 VMA_ASSERT(allocator && allocation);
14190 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14192 allocation->SetUserData(allocator, pUserData);
14194 #if VMA_RECORDING_ENABLED 14195 if(allocator->GetRecorder() != VMA_NULL)
14197 allocator->GetRecorder()->RecordSetAllocationUserData(
14198 allocator->GetCurrentFrameIndex(),
14209 VMA_ASSERT(allocator && pAllocation);
14211 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
14213 allocator->CreateLostAllocation(pAllocation);
14215 #if VMA_RECORDING_ENABLED 14216 if(allocator->GetRecorder() != VMA_NULL)
14218 allocator->GetRecorder()->RecordCreateLostAllocation(
14219 allocator->GetCurrentFrameIndex(),
14230 VMA_ASSERT(allocator && allocation && ppData);
14232 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14234 VkResult res = allocator->Map(allocation, ppData);
14236 #if VMA_RECORDING_ENABLED 14237 if(allocator->GetRecorder() != VMA_NULL)
14239 allocator->GetRecorder()->RecordMapMemory(
14240 allocator->GetCurrentFrameIndex(),
14252 VMA_ASSERT(allocator && allocation);
14254 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14256 #if VMA_RECORDING_ENABLED 14257 if(allocator->GetRecorder() != VMA_NULL)
14259 allocator->GetRecorder()->RecordUnmapMemory(
14260 allocator->GetCurrentFrameIndex(),
14265 allocator->Unmap(allocation);
14270 VMA_ASSERT(allocator && allocation);
14272 VMA_DEBUG_LOG(
"vmaFlushAllocation");
14274 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14276 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14278 #if VMA_RECORDING_ENABLED 14279 if(allocator->GetRecorder() != VMA_NULL)
14281 allocator->GetRecorder()->RecordFlushAllocation(
14282 allocator->GetCurrentFrameIndex(),
14283 allocation, offset, size);
14290 VMA_ASSERT(allocator && allocation);
14292 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14294 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14296 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14298 #if VMA_RECORDING_ENABLED 14299 if(allocator->GetRecorder() != VMA_NULL)
14301 allocator->GetRecorder()->RecordInvalidateAllocation(
14302 allocator->GetCurrentFrameIndex(),
14303 allocation, offset, size);
14310 VMA_ASSERT(allocator);
14312 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14314 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14316 return allocator->CheckCorruption(memoryTypeBits);
14322 size_t allocationCount,
14323 VkBool32* pAllocationsChanged,
14327 VMA_ASSERT(allocator && pAllocations);
14329 VMA_DEBUG_LOG(
"vmaDefragment");
14331 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14333 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14341 VMA_ASSERT(allocator && allocation && buffer);
14343 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14345 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14347 return allocator->BindBufferMemory(allocation, buffer);
14355 VMA_ASSERT(allocator && allocation && image);
14357 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14359 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14361 return allocator->BindImageMemory(allocation, image);
14366 const VkBufferCreateInfo* pBufferCreateInfo,
14372 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14374 if(pBufferCreateInfo->size == 0)
14376 return VK_ERROR_VALIDATION_FAILED_EXT;
14379 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14381 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14383 *pBuffer = VK_NULL_HANDLE;
14384 *pAllocation = VK_NULL_HANDLE;
14387 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14388 allocator->m_hDevice,
14390 allocator->GetAllocationCallbacks(),
14395 VkMemoryRequirements vkMemReq = {};
14396 bool requiresDedicatedAllocation =
false;
14397 bool prefersDedicatedAllocation =
false;
14398 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14399 requiresDedicatedAllocation, prefersDedicatedAllocation);
14403 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14405 VMA_ASSERT(vkMemReq.alignment %
14406 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14408 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14410 VMA_ASSERT(vkMemReq.alignment %
14411 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14413 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14415 VMA_ASSERT(vkMemReq.alignment %
14416 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14420 res = allocator->AllocateMemory(
14422 requiresDedicatedAllocation,
14423 prefersDedicatedAllocation,
14426 *pAllocationCreateInfo,
14427 VMA_SUBALLOCATION_TYPE_BUFFER,
14430 #if VMA_RECORDING_ENABLED 14431 if(allocator->GetRecorder() != VMA_NULL)
14433 allocator->GetRecorder()->RecordCreateBuffer(
14434 allocator->GetCurrentFrameIndex(),
14435 *pBufferCreateInfo,
14436 *pAllocationCreateInfo,
14444 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14448 #if VMA_STATS_STRING_ENABLED 14449 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14451 if(pAllocationInfo != VMA_NULL)
14453 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14458 allocator->FreeMemory(*pAllocation);
14459 *pAllocation = VK_NULL_HANDLE;
14460 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14461 *pBuffer = VK_NULL_HANDLE;
14464 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14465 *pBuffer = VK_NULL_HANDLE;
14476 VMA_ASSERT(allocator);
14478 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14483 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14485 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14487 #if VMA_RECORDING_ENABLED 14488 if(allocator->GetRecorder() != VMA_NULL)
14490 allocator->GetRecorder()->RecordDestroyBuffer(
14491 allocator->GetCurrentFrameIndex(),
14496 if(buffer != VK_NULL_HANDLE)
14498 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14501 if(allocation != VK_NULL_HANDLE)
14503 allocator->FreeMemory(allocation);
14509 const VkImageCreateInfo* pImageCreateInfo,
14515 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14517 if(pImageCreateInfo->extent.width == 0 ||
14518 pImageCreateInfo->extent.height == 0 ||
14519 pImageCreateInfo->extent.depth == 0 ||
14520 pImageCreateInfo->mipLevels == 0 ||
14521 pImageCreateInfo->arrayLayers == 0)
14523 return VK_ERROR_VALIDATION_FAILED_EXT;
14526 VMA_DEBUG_LOG(
"vmaCreateImage");
14528 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14530 *pImage = VK_NULL_HANDLE;
14531 *pAllocation = VK_NULL_HANDLE;
14534 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14535 allocator->m_hDevice,
14537 allocator->GetAllocationCallbacks(),
14541 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14542 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14543 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14546 VkMemoryRequirements vkMemReq = {};
14547 bool requiresDedicatedAllocation =
false;
14548 bool prefersDedicatedAllocation =
false;
14549 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14550 requiresDedicatedAllocation, prefersDedicatedAllocation);
14552 res = allocator->AllocateMemory(
14554 requiresDedicatedAllocation,
14555 prefersDedicatedAllocation,
14558 *pAllocationCreateInfo,
14562 #if VMA_RECORDING_ENABLED 14563 if(allocator->GetRecorder() != VMA_NULL)
14565 allocator->GetRecorder()->RecordCreateImage(
14566 allocator->GetCurrentFrameIndex(),
14568 *pAllocationCreateInfo,
14576 res = allocator->BindImageMemory(*pAllocation, *pImage);
14580 #if VMA_STATS_STRING_ENABLED 14581 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14583 if(pAllocationInfo != VMA_NULL)
14585 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14590 allocator->FreeMemory(*pAllocation);
14591 *pAllocation = VK_NULL_HANDLE;
14592 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14593 *pImage = VK_NULL_HANDLE;
14596 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14597 *pImage = VK_NULL_HANDLE;
14608 VMA_ASSERT(allocator);
14610 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14615 VMA_DEBUG_LOG(
"vmaDestroyImage");
14617 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14619 #if VMA_RECORDING_ENABLED 14620 if(allocator->GetRecorder() != VMA_NULL)
14622 allocator->GetRecorder()->RecordDestroyImage(
14623 allocator->GetCurrentFrameIndex(),
14628 if(image != VK_NULL_HANDLE)
14630 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14632 if(allocation != VK_NULL_HANDLE)
14634 allocator->FreeMemory(allocation);
14638 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1586
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1887
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1643
@@ -73,26 +73,26 @@ $(function() {
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1617
-
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2209
+
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2212
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1598
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1844
Definition: vk_mem_alloc.h:1947
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1590
-
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2309
+
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2312
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1640
-
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2579
-
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2098
+
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2582
+
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2101
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1487
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
-
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2190
+
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2193
Definition: vk_mem_alloc.h:1924
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1579
-
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1997
+
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2000
Definition: vk_mem_alloc.h:1871
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1652
-
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2126
+
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2129
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1705
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1637
@@ -102,41 +102,41 @@ $(function() {
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1777
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1595
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1776
-
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2583
+
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2586
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1669
VmaStatInfo total
Definition: vk_mem_alloc.h:1786
-
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2591
-
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1981
-
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2574
+
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2594
+
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1984
+
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2577
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1596
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1521
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1646
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
-
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2140
-
Definition: vk_mem_alloc.h:2134
+
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2143
+
Definition: vk_mem_alloc.h:2137
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1712
-
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2319
+
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2322
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1591
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1615
-
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2018
-
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2160
-
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2196
+
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2021
+
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2163
+
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2199
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1577
-
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2143
+
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2146
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1822
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
-
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2569
+
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2572
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
-
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2587
+
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2590
Definition: vk_mem_alloc.h:1861
-
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2005
+
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2008
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1594
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
@@ -144,43 +144,43 @@ $(function() {
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1782
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1527
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
-
Definition: vk_mem_alloc.h:1965
+
Definition: vk_mem_alloc.h:1968
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1548
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1619
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1553
-
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2589
+
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2592
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
-
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1992
-
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2206
+
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1995
+
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2209
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1587
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1765
-
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2155
+
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2158
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1540
-
Definition: vk_mem_alloc.h:2130
+
Definition: vk_mem_alloc.h:2133
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1931
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1778
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1544
-
Definition: vk_mem_alloc.h:1955
-
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2146
+
Definition: vk_mem_alloc.h:1958
+
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2149
Definition: vk_mem_alloc.h:1870
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1593
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
-
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1987
-
Definition: vk_mem_alloc.h:1978
+
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1990
+
Definition: vk_mem_alloc.h:1981
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1768
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1589
-
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2168
+
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2171
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1655
-
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2199
-
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1976
-
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2011
+
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2202
+
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1979
+
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2014
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1693
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1784
@@ -192,62 +192,62 @@ $(function() {
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1542
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1599
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
-
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2182
+
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2185
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1592
Definition: vk_mem_alloc.h:1942
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1633
-
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2333
+
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2336
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1649
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1777
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1774
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
-
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2187
+
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2190
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:1951
-
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2314
-
Definition: vk_mem_alloc.h:1962
-
Definition: vk_mem_alloc.h:1974
-
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2585
+
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2317
+
Definition: vk_mem_alloc.h:1965
+
Definition: vk_mem_alloc.h:1977
+
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2588
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1585
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1772
Definition: vk_mem_alloc.h:1827
-
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2136
+
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2139
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1622
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1770
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1597
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1601
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1898
-
Definition: vk_mem_alloc.h:1969
+
Definition: vk_mem_alloc.h:1972
Definition: vk_mem_alloc.h:1854
-
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2328
+
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2331
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1575
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1588
-
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2115
+
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2118
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
-
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2295
+
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2298
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
-
Definition: vk_mem_alloc.h:1959
-
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2080
+
Definition: vk_mem_alloc.h:1962
+
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2083
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1778
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1937
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1609
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1785
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
-
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2193
+
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2196
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1778
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
-
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2300
+
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2303