#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif
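// Illustrative note (not part of the original header): each VMA_* feature macro
// above is only a default, detected from the Vulkan headers. It can be forced
// from the including code before this header is pulled in, e.g. to restrict the
// library to plain Vulkan 1.0 behavior:
//
//   #define VMA_VULKAN_VERSION 1000000
//   #define VMA_DEDICATED_ALLOCATION 0
//   #define VMA_BIND_MEMORY2 0
//   #define VMA_MEMORY_BUDGET 0
//   #include "vk_mem_alloc.h"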
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
#define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
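// A hedged usage sketch (illustrative, not part of the original header; assumes
// an already initialized VmaAllocator named "allocator"):
//
//   VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//   bufCreateInfo.size = 65536;
//   bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//
//   VmaAllocationCreateInfo allocCreateInfo = {};
//   allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//   uint32_t memTypeIndex;
//   VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
//       allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);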
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
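// A hedged usage sketch (illustrative, not part of the original header):
// vmaCreateBuffer combines buffer creation, memory allocation and binding:
//
//   VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//   bufCreateInfo.size = sizeof(float) * 1024;
//   bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
//
//   VmaAllocationCreateInfo allocCreateInfo = {};
//   allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
//
//   VkBuffer buf;
//   VmaAllocation alloc;
//   VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
//       &buf, &alloc, nullptr);
//   // ... later: vmaDestroyBuffer(allocator, buf, alloc);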
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it's always 199711L, despite shared_mutex works since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

/*
Following headers are used in this CONFIGURATION section only, so you can remove
them if not needed.
*/
#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex>

#define VMA_NULL nullptr
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
#include <malloc.h> // for memalign
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr)         assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr)   free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    #include <cstdio> // for snprintf
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block.
    // Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
    // enable writing magic values around allocations and validating them on free,
    // so that memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable single mutex protecting
    // all entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
        className(const className&) = delete; \
        className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
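// Worked example (illustrative, not part of the original source): this is a
// classic branch-free SWAR popcount. For v = 0b1011, i.e. memory types 0, 1
// and 3 allowed in a Vulkan memoryTypeBits mask, VmaCountBitsSet(v) == 3.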
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}

static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
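// Worked example (illustrative, not part of the original source), with
// pageSize = 4096 (bufferImageGranularity):
//   resource A: offset 4000, size 100 -> last byte 4099, page 4099 & ~4095 = 4096
//   resource B: offset 4100           -> page 4100 & ~4095 = 4096
// Both fall on the page starting at 4096, so the function returns true and the
// caller must then check the suballocation types for a granularity conflict.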
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
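// Illustrative usage sketch (not part of the original source): the RAII
// wrappers above pair every Lock with its matching Unlock automatically:
//
//   VMA_RW_MUTEX blocksMutex;
//   {
//       VmaMutexLockRead lock(blocksMutex, /*useMutex=*/true);
//       // ... shared, read-only access to the protected structures ...
//   } // UnlockRead() runs here, even on early return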
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the collection of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
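// Illustrative sketch (not part of the original source): both helpers expect a
// range already sorted according to cmp, e.g. a sorted array of offsets:
//
//   const VkDeviceSize offsets[] = { 0, 256, 1024, 4096 };
//   struct Less { bool operator()(VkDeviceSize a, VkDeviceSize b) const { return a < b; } };
//   const VkDeviceSize* it = VmaBinaryFindFirstNotLess(
//       offsets, offsets + 4, (VkDeviceSize)1000, Less());
//   // it points at 1024 - the insertion point for 1000.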
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
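// Illustrative sketch (not part of the original source): vma_new pairs
// placement-new with the user-replaceable VkAllocationCallbacks and vma_delete
// reverses it, calling the destructor before returning the memory:
//
//   const VkAllocationCallbacks* allocs = VMA_NULL; // null -> system aligned malloc
//   uint32_t* pNum = vma_new(allocs, uint32_t)(42);
//   vma_delete(allocs, pNum);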
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
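// Illustrative sketch (not part of the original source): VmaStlAllocator routes
// an STL container's storage through VmaMalloc/VmaFree, honoring any
// user-provided VkAllocationCallbacks (pAllocationCallbacks is a hypothetical
// pointer here; a null value falls back to the system aligned allocator):
//
//   VmaStlAllocator<int> alloc(pAllocationCallbacks);
//   std::vector<int, VmaStlAllocator<int>> v(alloc);
//   v.push_back(7); // allocation goes through the callbacks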
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
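// Illustrative sketch (not part of the original source): the pool allocator
// hands out fixed-size slots from growing blocks; Alloc() and Free() run in
// constant time (plus a linear scan over blocks in Free) via the free-list
// threaded through the Item union:
//
//   VmaPoolAllocator<VmaSuballocation> pool(pAllocationCallbacks, 128);
//   VmaSuballocation* s = pool.Alloc(); // uninitialized POD storage
//   // ... use *s ...
//   pool.Free(s);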
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}
template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
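// Illustrative sketch (not part of the original source): VmaMap stores pairs
// sorted by key in a VmaVector, so find() is a binary search rather than a
// hash lookup:
//
//   VmaStlAllocator<VmaPair<uint32_t, float>> alloc(pAllocationCallbacks);
//   VmaMap<uint32_t, float> map(alloc);
//   map.insert(VmaPair<uint32_t, float>(5, 1.5f));
//   VmaPair<uint32_t, float>* it = map.find(5); // != map.end()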
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    void Ctor(uint32_t currentFrameIndex, bool userDataString)
    {
        m_Alignment = 1;
        m_Size = 0;
        m_MemoryTypeIndex = 0;
        m_pUserData = VMA_NULL;
        m_LastUseFrameIndex = currentFrameIndex;
        m_MapCount = 0;
        m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
        m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
        m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;

#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    void Dtor()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.

    If hAllocation is already lost, assert - you should not call it then.
    If hAllocation was not created with CAN_BECOME_LOST_BIT, assert.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

// Parameters of planned allocation inside a VmaDeviceMemoryBlock.
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
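// Worked example (illustrative, not part of the original source): a request
// whose range overlaps 2 MB of live allocations that would have to become
// lost, spread over 3 allocations, costs
//   sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST
//   = 2 MB + 3 * 1 MB = 5 MB (as equivalent bytes),
// which lets the allocator prefer candidate positions with fewer casualties.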
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
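// Worked example (illustrative, not part of the original source): with
// m_UsableSize = 256 MB, level 0 is a single 256 MB node and each deeper level
// halves the node size (LevelToNodeSize(level) = m_UsableSize >> level).
// A 40 MB request is served from the 64 MB level, i.e. AllocSizeToLevel(40 MB)
// == 2, because 256 MB >> 2 == 64 MB is the smallest node size still >= 40 MB.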
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if not belongs to custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it's not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    void GetPoolStats(VmaPoolStats* pStats);

    bool IsEmpty();
    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

    // Adds statistics of this BlockVector to pStats.
    void AddStats(VmaStats* pStats);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    /* There can be at most one allocation that is completely empty - a
    hysteresis to avoid pessimistic case of alternating creation and destruction
    of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    // To be used only without CAN_MAKE_OTHER_LOST flag.
    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
    VMA_CLASS_NO_COPY(VmaPool_T)

    VmaBlockVector m_BlockVector;

    VkDeviceSize preferredBlockSize);

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
class VmaDefragmentationAlgorithm
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)

    VmaDefragmentationAlgorithm(
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    virtual ~VmaDefragmentationAlgorithm()

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
        VkBool32* m_pChanged;

        m_hAllocation(VK_NULL_HANDLE),
        m_pChanged(VMA_NULL)

        m_hAllocation(hAlloc),
        m_pChanged(pChanged)
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)

    VmaDefragmentationAlgorithm_Generic(
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();

    struct AllocationInfoOffsetGreater
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();

    size_t m_OriginalBlockIndex;
    VmaDeviceMemoryBlock* m_pBlock;
    bool m_HasNonMovableAllocations;
    VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

    BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
        m_OriginalBlockIndex(SIZE_MAX),
        m_HasNonMovableAllocations(true),
        m_Allocations(pAllocationCallbacks)

    void CalcHasNonMovableAllocations()
        const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
        const size_t defragmentAllocCount = m_Allocations.size();
        m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;

    void SortAllocationsBySizeDescending()
        VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());

    void SortAllocationsByOffsetDescending()
        VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());

    struct BlockPointerLess
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;

    struct BlockInfoCompareMoveDestination
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)

    VmaDefragmentationAlgorithm_Fast(
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

    size_t origBlockIndex;

    class FreeSpaceDatabase
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
                m_FreeSpaces[i] = s;

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)

            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))

            if(bestIndex != SIZE_MAX)
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                            bestFreeSpaceAfter = freeSpaceAfter;

            if(bestIndex != SIZE_MAX)
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;

        static const size_t MAX_COUNT = 4;

        size_t blockInfoIndex;
        VkDeviceSize offset;
        } m_FreeSpaces[MAX_COUNT];

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
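// Worked example (illustrative, values assumed): suppose a registered free space
// has offset = 100 and size = 200, and Fetch() is called with alignment = 64 and
// size = 96. Then dstOffset = VmaAlignUp(100, 64) = 128, and the request fits
// because 128 + 96 = 224 <= 100 + 200 = 300. The space left after the move is
// (100 + 200) - (128 + 96) = 76 bytes, which Fetch() compares against
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER to decide whether the entry stays
// in the database (with updated offset and size) or is invalidated.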
struct VmaBlockDefragmentationContext
    BLOCK_FLAG_USED = 0x00000001,

class VmaBlockVectorDefragmentationContext
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)

    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;

    VmaDefragmentationAlgorithm* m_pAlgorithm;

    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
struct VmaDefragmentationContext_T
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)

    VmaDefragmentationContext_T(
        uint32_t currFrameIndex,
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,

    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;

    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
#if VMA_RECORDING_ENABLED

    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled);

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        uint64_t allocationCount,
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
    void RecordFreeMemory(uint32_t frameIndex,
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
    void RecordSetAllocationUserData(uint32_t frameIndex,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
    void RecordMapMemory(uint32_t frameIndex,
    void RecordUnmapMemory(uint32_t frameIndex,
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
    void RecordDestroyBuffer(uint32_t frameIndex,
    void RecordDestroyImage(uint32_t frameIndex,
    void RecordTouchAllocation(uint32_t frameIndex,
    void RecordGetAllocationInfo(uint32_t frameIndex,
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
    void RecordDefragmentationBegin(uint32_t frameIndex,
    void RecordDefragmentationEnd(uint32_t frameIndex,
    void RecordSetPoolName(uint32_t frameIndex,

    class UserDataString
        const char* GetString() const { return m_Str; }

    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
            fprintf(m_File, " %p", pItems[i]);

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);

#endif // #if VMA_RECORDING_ENABLED
class VmaAllocationObjectAllocator
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)

    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
struct VmaCurrentBudgetData
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
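// Illustrative note (not library API): per-heap budget bookkeeping is symmetric,
// e.g. allocating then freeing a 64 KiB allocation on heap 0 would do
//   budget.AddAllocation(0, 65536);     // m_AllocationBytes[0] += 65536
//   budget.RemoveAllocation(0, 65536);  // asserts the counter never underflows
// and, when VMA_MEMORY_BUDGET is enabled, m_OperationsSinceBudgetFetch counts
// these calls so the allocator can decide when it is worth re-querying
// VK_EXT_memory_budget (see UpdateVulkanBudget() declared further below).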
struct VmaAllocator_T
    VMA_CLASS_NO_COPY(VmaAllocator_T)

    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;

        return m_VulkanFunctions;

    VkDeviceSize GetBufferImageGranularity() const
        static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
        m_PhysicalDeviceProperties.limits.bufferImageGranularity);

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;

    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;

    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
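// Worked example (illustrative, values assumed): on a device with
// nonCoherentAtomSize = 64 and a default VMA_DEBUG_ALIGNMENT of 1, a memory type
// that is HOST_VISIBLE but not HOST_COHERENT makes IsMemoryTypeNonCoherent()
// return true, so GetMemoryTypeMinAlignment() returns max(1, 64) = 64; a
// coherent type would get just VMA_DEBUG_ALIGNMENT.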
    bool IsIntegratedGpu() const
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }

    void GetBufferMemoryRequirements(
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        VmaSuballocationType suballocType,
        size_t allocationCount,

        size_t allocationCount,

    VkResult ResizeAllocation(
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

        VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);

    VkResult DefragmentationBegin(
    VkResult DefragmentationEnd(

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,

    VkResult BindBufferMemory(
        VkDeviceSize allocationLocalOffset,
    VkResult BindImageMemory(
        VkDeviceSize allocationLocalOffset,

    void FlushOrInvalidateAllocation(
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    uint32_t GetGpuDefragmentationMemoryTypeBits();

    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;

    VMA_RW_MUTEX m_PoolsMutex;
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,

    VkResult AllocateDedicatedMemoryPage(
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool isUserDataString,

    VkResult AllocateDedicatedMemory(
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool isUserDataString,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);

static void VmaFree(VmaAllocator hAllocator, void* ptr)
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);

template<typename T>
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
    VmaFree(hAllocator, ptr);

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
    for(size_t i = count; i--; )
    VmaFree(hAllocator, ptr);
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

    VmaVector< char, VmaStlAllocator<char> > m_Data;

void VmaStringBuilder::Add(const char* pStr)
    const size_t strLen = strlen(pStr);
    const size_t oldCount = m_Data.size();
    m_Data.resize(oldCount + strLen);
    memcpy(m_Data.data() + oldCount, pStr, strLen);

void VmaStringBuilder::AddNumber(uint32_t num)
    *--p = '0' + (num % 10);

void VmaStringBuilder::AddNumber(uint64_t num)
    *--p = '0' + (num % 10);

void VmaStringBuilder::AddPointer(const void* ptr)
    VmaPtrToStr(buf, sizeof(buf), ptr);

#endif // #if VMA_STATS_STRING_ENABLED
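// Illustrative usage sketch (assumption, not library-provided code):
// VmaStringBuilder is an append-only character buffer. Note that the data it
// returns is not null-terminated unless a '\0' is added explicitly, so
// GetLength() must be used alongside GetData():
//   VmaStringBuilder sb(allocator);
//   sb.Add("blocks: ");
//   sb.AddNumber(42u);
//   sb.AddNewLine();
//   fwrite(sb.GetData(), 1, sb.GetLength(), stdout);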
#if VMA_STATS_STRING_ENABLED

    VMA_CLASS_NO_COPY(VmaJsonWriter)

    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);

    void BeginObject(bool singleLine = false);
    void BeginArray(bool singleLine = false);

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);

    static const char* const INDENT;

    enum COLLECTION_TYPE
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,

        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);

const char* const VmaJsonWriter::INDENT = "  ";
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)

VmaJsonWriter::~VmaJsonWriter()
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());

void VmaJsonWriter::BeginObject(bool singleLine)
    VMA_ASSERT(!m_InsideString);
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);

void VmaJsonWriter::EndObject()
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);

void VmaJsonWriter::BeginArray(bool singleLine)
    VMA_ASSERT(!m_InsideString);
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);

void VmaJsonWriter::EndArray()
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);

void VmaJsonWriter::WriteString(const char* pStr)

void VmaJsonWriter::BeginString(const char* pStr)
    VMA_ASSERT(!m_InsideString);
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
        ContinueString(pStr);

void VmaJsonWriter::ContinueString(const char* pStr)
    VMA_ASSERT(m_InsideString);
    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    VMA_ASSERT(0 && "Character not currently supported.");

void VmaJsonWriter::ContinueString(uint32_t n)
    VMA_ASSERT(m_InsideString);

void VmaJsonWriter::ContinueString(uint64_t n)
    VMA_ASSERT(m_InsideString);

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);

void VmaJsonWriter::EndString(const char* pStr)
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
        ContinueString(pStr);
    m_InsideString = false;

void VmaJsonWriter::WriteNumber(uint32_t n)
    VMA_ASSERT(!m_InsideString);

void VmaJsonWriter::WriteNumber(uint64_t n)
    VMA_ASSERT(!m_InsideString);

void VmaJsonWriter::WriteBool(bool b)
    VMA_ASSERT(!m_InsideString);
    m_SB.Add(b ? "true" : "false");

void VmaJsonWriter::WriteNull()
    VMA_ASSERT(!m_InsideString);

void VmaJsonWriter::BeginValue(bool isString)
    if(!m_Stack.empty())
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
            VMA_ASSERT(isString);
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        else if(currItem.valueCount > 0)
        ++currItem.valueCount;

void VmaJsonWriter::WriteIndent(bool oneLess)
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        for(size_t i = 0; i < count; ++i)

#endif // #if VMA_STATS_STRING_ENABLED
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
    if(IsUserDataString())
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
        FreeUserDataString(hAllocator);
        if(pUserData != VMA_NULL)
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        m_pUserData = pUserData;

void VmaAllocation_T::ChangeBlockAllocation(
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    if(block != m_BlockAllocation.m_Block)
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;

VkDeviceSize VmaAllocation_T::GetOffset() const
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:

VkDeviceMemory VmaAllocation_T::GetMemory() const
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    return VK_NULL_HANDLE;

void* VmaAllocation_T::GetMappedData() const
    case ALLOCATION_TYPE_BLOCK:
        void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
        VMA_ASSERT(pBlockData != VMA_NULL);
        return (char*)pBlockData + m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;

bool VmaAllocation_T::CanBecomeLost() const
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    VMA_ASSERT(CanBecomeLost());
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
    else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
    if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
#if VMA_STATS_STRING_ENABLED

static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
        json.WriteString("UserData");
        if(IsUserDataString())
            json.WriteString((const char*)m_pUserData);
        json.ContinueString_Pointer(m_pUserData);

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
void VmaAllocation_T::BlockAllocMap()
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");

void VmaAllocation_T::BlockAllocUnmap()
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
        *ppData = m_DedicatedAllocation.m_pMappedData;
        VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
        return VK_ERROR_MEMORY_MAP_FAILED;
    VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
        hAllocator->m_hDevice,
        m_DedicatedAllocation.m_hMemory,
    if(result == VK_SUCCESS)
        m_DedicatedAllocation.m_pMappedData = *ppData;

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
        m_DedicatedAllocation.m_pMappedData = VMA_NULL;
        (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory);
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
    json.WriteString("Blocks");
    json.WriteString("Allocations");
    json.WriteString("UnusedRanges");
    json.WriteString("UsedBytes");
    json.WriteString("UnusedBytes");

    json.WriteString("AllocationSize");
    json.BeginObject(true);
    json.WriteString("Min");
    json.WriteString("Avg");
    json.WriteString("Max");

    json.WriteString("UnusedRangeSize");
    json.BeginObject(true);
    json.WriteString("Min");
    json.WriteString("Avg");
    json.WriteString("Max");

#endif // #if VMA_STATS_STRING_ENABLED

struct VmaSuballocationItemSizeLess
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
        return lhs->size < rhs->size;
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
        return lhs->size < rhsSize;
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const

#endif // #if VMA_STATS_STRING_ENABLED
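// Illustrative note (assumed output shape, abbreviated): together these helpers
// emit a block description roughly like
//   { "TotalBytes": N, "UnusedBytes": M, "Allocations": A, "UnusedRanges": R,
//     "Suballocations": [ { "Offset": o, ... }, { "Offset": o, "Type": "FREE",
//     "Size": s }, ... ] }
// with each used entry filled in by VmaAllocation_T::PrintParameters() and each
// free entry by PrintDetailedMap_UnusedRange() above.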
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    m_FreeSuballocationsBySize.push_back(suballocItem);

bool VmaBlockMetadata_Generic::Validate() const
    VMA_VALIDATE(!m_Suballocations.empty());

    VkDeviceSize calculatedOffset = 0;
    uint32_t calculatedFreeCount = 0;
    VkDeviceSize calculatedSumFreeSize = 0;
    size_t freeSuballocationsToRegister = 0;
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        const VmaSuballocation& subAlloc = *suballocItem;
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(!prevFree || !currFree);
        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                ++freeSuballocationsToRegister;
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);

            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);

        calculatedOffset += subAlloc.size;
        prevFree = currFree;

    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(suballocItem->size >= lastSize);
        lastSize = suballocItem->size;

    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
    if(!m_FreeSuballocationsBySize.empty())
        return m_FreeSuballocationsBySize.back()->size;

bool VmaBlockMetadata_Generic::IsEmpty() const
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    inoutStats.size += GetSize();

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
    PrintDetailedMap_Begin(json,
        m_Suballocations.size() - (size_t)m_FreeCount,
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
    PrintDetailedMap_End(json);

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    VmaAllocationRequest* pAllocationRequest)
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + freeSuballocCount,
            allocSize + 2 * VMA_DEBUG_MARGIN,
            VmaSuballocationItemSizeLess());
        size_t index = it - m_FreeSuballocationsBySize.data();
        for(; index < freeSuballocCount; ++index)
                bufferImageGranularity,
                m_FreeSuballocationsBySize[index],
                &pAllocationRequest->offset,
                &pAllocationRequest->itemsToMakeLostCount,
                &pAllocationRequest->sumFreeSize,
                &pAllocationRequest->sumItemSize))
                pAllocationRequest->item = m_FreeSuballocationsBySize[index];
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    bufferImageGranularity,
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                    pAllocationRequest->item = it;
            for(size_t index = freeSuballocCount; index--; )
                    bufferImageGranularity,
                    m_FreeSuballocationsBySize[index],
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];

    if(canMakeOtherLost)
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
                    bufferImageGranularity,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                    *pAllocationRequest = tmpAllocRequest;
                    pAllocationRequest->item = suballocIt;
                if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    *pAllocationRequest = tmpAllocRequest;
                    pAllocationRequest->item = suballocIt;
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
            ++pAllocationRequest->item;
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            it = FreeSuballocation(it);
            ++lostAllocationCount;
    return lostAllocationCount;

VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);

        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);

    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    m_SumFreeSize -= allocSize;

void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(0 && "Not found!");

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
            FreeSuballocation(suballocItem);
    VMA_ASSERT(0 && "Not found!");
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;

    if(canMakeOtherLost)
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
            *pSumFreeSize = suballocItem->size;
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;

        if(GetSize() - suballocItem->offset < allocSize)

        *pOffset = suballocItem->offset;

        if(VMA_DEBUG_MARGIN > 0)
            *pOffset += VMA_DEBUG_MARGIN;

        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        if(bufferImageGranularity > 1)
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                        bufferImageGranularityConflict = true;
            if(bufferImageGranularityConflict)
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);

        if(*pOffset >= suballocItem->offset + suballocItem->size)

        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        if(suballocItem->offset + totalSize > GetSize())

        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
                if(lastSuballocItem == m_Suballocations.cend())
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                    *pSumFreeSize += lastSuballocItem->size;
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;

        if(bufferImageGranularity > 1)
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            ++*itemsToMakeLostCount;

        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        if(suballoc.size < allocSize)

        *pOffset = suballoc.offset;

        if(VMA_DEBUG_MARGIN > 0)
            *pOffset += VMA_DEBUG_MARGIN;

        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        if(bufferImageGranularity > 1)
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                        bufferImageGranularityConflict = true;
            if(bufferImageGranularityConflict)
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);

        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)

        if(bufferImageGranularity > 1)
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
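// Worked example (illustrative, values assumed): with a free suballocation at
// offset = 1000, VMA_DEBUG_MARGIN = 16 and allocAlignment = 256, the candidate
// offset above becomes VmaAlignUp(1000 + 16, 256) = 1024. If
// bufferImageGranularity = 4096 and a neighboring resource of a conflicting
// type shares that 4 KiB page, the offset is re-aligned to
// VmaAlignUp(1024, 4096) = 4096 before the remaining-size checks run.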
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    m_Suballocations.erase(nextItem);

VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    m_SumFreeSize += suballoc.size;

    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
        mergeWithNext = true;

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
            mergeWithPrev = true;

        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);

        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);

    RegisterFreeSuballocation(suballocItem);
    return suballocItem;

void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
        if(m_FreeSuballocationsBySize.empty())
            m_FreeSuballocationsBySize.push_back(item);
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            if(m_FreeSuballocationsBySize[index] == item)
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        VMA_ASSERT(0 && "Not found.");
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear

VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    // Gaps left by freed allocations inside the vectors are not suitable for
    // reuse in the linear allocator, so only space available for new
    // allocations is considered.
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before beginning of 1st
        // (which would then have to be used as ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
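// The three switch cases above correspond to the three layouts the linear
// metadata can be in. A compact restatement of where the reusable space lives
// (illustrative only; plain offsets stand in for the suballocation vectors):
#if 0
enum class SecondMode { Empty, RingBuffer, DoubleStack };
// first1st/end1st = first and past-the-end byte used by the 1st vector,
// end2nd = past-the-end byte used by the 2nd vector (ring buffer),
// top2nd = lowest byte used by the 2nd vector (double stack).
static unsigned long long MaxReusable(SecondMode mode, unsigned long long size,
    unsigned long long first1st, unsigned long long end1st,
    unsigned long long end2nd, unsigned long long top2nd)
{
    switch(mode)
    {
    case SecondMode::Empty:       return first1st > size - end1st ? first1st : size - end1st;
    case SecondMode::RingBuffer:  return first1st - end2nd; // gap between 2nd's end and 1st's start
    case SecondMode::DoubleStack: return top2nd - end1st;   // gap between 1st's end and 2nd's top
    }
    return 0;
}
#endif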
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = size;
            }
        }
    }
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            ++inoutStats.allocationCount;

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations and unused ranges.
    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }
            ++alloc1stCount;
            usedBytes += suballoc.size;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: write the actual entries.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Make bigger alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
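// Worked example of the top-down placement above (made-up values): block size
// 1024, current 2nd-stack top at offset 900, allocSize 100, VMA_DEBUG_MARGIN 16,
// alignment 64. The candidate offset starts at 800, drops to 784 after the
// margin, and aligns down to 768, so the allocation occupies [768, 868) and
// succeeds only if the 1st vector ends at or below 768 - 16. Illustrative only:
#if 0
static bool UpperAddressExample()
{
    unsigned long long offset = 900 - 100;   // 800: just below the 2nd-stack top
    offset -= 16;                            // 784: leave the debug margin above
    offset &= ~63ull;                        // 768: equivalent of VmaAlignDown(784, 64)
    const unsigned long long endOf1st = 700; // hypothetical end of the 1st vector
    return endOf1st + 16 <= offset;          // true: request fits at [768, 868)
}
#endif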
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            // Not checking actual VmaIsBufferImageGranularityConflict(allocType, suballoc.type).
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation,
            // even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            return true;
        }
    }

    return false;
}
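// The second branch above is what turns the linear allocator into a ring buffer:
// once the end of the block is reached, new allocations go to low offsets (the
// 2nd vector) and may only grow until they hit the oldest data at the front of
// the 1st vector. A minimal sketch of that wrap-around decision, simplified to
// raw offsets and a single margin (illustrative only, not the library's logic
// verbatim):
#if 0
struct RingState { unsigned long long end1st, begin1st, end2nd, size; };
static bool RingCanPlace(const RingState& s, unsigned long long allocSize, unsigned long long margin)
{
    if(s.end1st + margin + allocSize + margin <= s.size)
        return true;                                     // still fits after the 1st vector
    // Wrapped part: must stay below the oldest (front) allocation of the 1st vector.
    return s.end2nd + margin + allocSize + margin <= s.begin1st;
}
#endif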
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: Mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
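// The heuristic above compacts only when the bookkeeping is mostly holes:
// nullItemCount * 2 >= liveCount * 3 means at least 1.5 null entries per live
// entry, and the > 32 guard skips tiny vectors where the copy would cost more
// than it saves. Worked example (illustrative only):
#if 0
static bool ShouldCompactExample(size_t nullItems, size_t total)
{
    const size_t live = total - nullItems;
    // 40 entries with 24 nulls: 24*2 = 48 >= 16*3 = 48 -> compact.
    // 40 entries with 23 nulls: 23*2 = 46 <  17*3 = 51 -> keep as is.
    return total > 32 && nullItems * 2 >= live * 3;
}
#endif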
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
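// The final branch above is the cheap "swap" that makes the ring buffer work
// with two plain vectors: when the 1st vector drains, flipping m_1stVectorIndex
// re-labels the 2nd vector as the new 1st in O(1), with no element moves. The
// same double-buffer trick in isolation (illustrative only):
#if 0
#include <vector>
struct TwoVectors
{
    std::vector<int> v[2];
    unsigned first = 0;
    std::vector<int>& First()  { return v[first]; }
    std::vector<int>& Second() { return v[first ^ 1]; }
    void SwapRoles() { first ^= 1; } // O(1): no copying, just re-labeling
};
#endif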
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy

VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
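// Because the buddy system halves node sizes level by level, only a power-of-2
// prefix of the block is managed; VmaPrevPow2 rounds the block size down and the
// remainder is reported as unusable. A standalone sketch of that rounding and of
// the level count derived from it (illustrative only):
#if 0
static unsigned long long PrevPow2(unsigned long long v)
{
    unsigned long long p = 1;
    while(p * 2 <= v) p *= 2;   // e.g. 1000 -> 512, so 488 bytes become unusable
    return p;
}
static unsigned CountLevels(unsigned long long usableSize, unsigned long long minNodeSize, unsigned maxLevels)
{
    unsigned levels = 1;
    while(levels < maxLevels && (usableSize >> levels) >= minNodeSize)
        ++levels;               // level n manages nodes of usableSize >> n bytes
    return levels;
}
#endif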
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = 0;
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the request might be
    // an OPTIMAL image, inflate alignment and size to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    // Lost allocations are not supported in buddy allocator.
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in buddy allocator.
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this level.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
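// Walkthrough of the loop above (made-up numbers): with m_UsableSize = 512 and
// allocSize = 100, node sizes per level are 512, 256, 128, 64, ...; descent stops
// once the next level (64) could no longer hold 100 bytes, so the request maps to
// the 128-byte level. Equivalent standalone computation (illustrative only):
#if 0
static unsigned SizeToLevel(unsigned long long allocSize, unsigned long long usableSize, unsigned levelCount)
{
    unsigned level = 0;
    while((usableSize >> (level + 1)) >= allocSize && level + 1 < levelCount)
        ++level; // allocSize = 100, usableSize = 512 -> level 2 (128-byte nodes)
    return level;
}
#endif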
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes if possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
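// The merge loop above is the classic buddy rule: within one level, a node's
// buddy offset differs only in the bit corresponding to that level's node size,
// so when both halves are free they collapse into the parent and the test
// repeats one level up. The same invariant on raw offsets (illustrative only):
#if 0
static unsigned long long BuddyOffset(unsigned long long offset, unsigned long long nodeSize)
{
    return offset ^ nodeSize; // nodes of size 64: buddy of 128 is 192, buddy of 192 is 128
}
#endif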
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaDeviceMemoryBlock

VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // This is the most important assert in the entire library.
    // Hitting it means you have some memory leak - unreleased VmaAllocation objects.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
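// The two checks above assume every allocation is bracketed by VMA_DEBUG_MARGIN
// bytes filled with a known pattern: [magic][allocation][magic]. A buffer
// overflow tramples the trailing magic, an underflow the leading one. The layout
// in miniature (illustrative only; 0x7F is a stand-in pattern, not the library's
// actual magic value):
#if 0
static bool CheckGuards(const unsigned char* block, size_t allocOffset, size_t allocSize, size_t margin)
{
    for(size_t i = 0; i < margin; ++i)
    {
        if(block[allocOffset - margin + i] != 0x7F) return false;    // corruption before
        if(block[allocOffset + allocSize + i] != 0x7F) return false; // corruption after
    }
    return true;
}
#endif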
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
11692 memset(&outInfo, 0,
sizeof(outInfo));
11711 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
11719 VmaPool_T::VmaPool_T(
11722 VkDeviceSize preferredBlockSize) :
11726 createInfo.memoryTypeIndex,
11727 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11728 createInfo.minBlockCount,
11729 createInfo.maxBlockCount,
11731 createInfo.frameInUseCount,
11732 createInfo.blockSize != 0,
11739 VmaPool_T::~VmaPool_T()
11743 void VmaPool_T::SetName(
const char* pName)
11745 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
11746 VmaFreeString(allocs, m_Name);
11748 if(pName != VMA_NULL)
11750 m_Name = VmaCreateStringCopy(allocs, pName);
11758 #if VMA_STATS_STRING_ENABLED
11760 #endif // #if VMA_STATS_STRING_ENABLED
11762 VmaBlockVector::VmaBlockVector(
11765 uint32_t memoryTypeIndex,
11766 VkDeviceSize preferredBlockSize,
11767 size_t minBlockCount,
11768 size_t maxBlockCount,
11769 VkDeviceSize bufferImageGranularity,
11770 uint32_t frameInUseCount,
11771 bool explicitBlockSize,
11772 uint32_t algorithm) :
11773 m_hAllocator(hAllocator),
11774 m_hParentPool(hParentPool),
11775 m_MemoryTypeIndex(memoryTypeIndex),
11776 m_PreferredBlockSize(preferredBlockSize),
11777 m_MinBlockCount(minBlockCount),
11778 m_MaxBlockCount(maxBlockCount),
11779 m_BufferImageGranularity(bufferImageGranularity),
11780 m_FrameInUseCount(frameInUseCount),
11781 m_ExplicitBlockSize(explicitBlockSize),
11782 m_Algorithm(algorithm),
11783 m_HasEmptyBlock(false),
11784 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11789 VmaBlockVector::~VmaBlockVector()
11791 for(
size_t i = m_Blocks.size(); i--; )
11793 m_Blocks[i]->Destroy(m_hAllocator);
11794 vma_delete(m_hAllocator, m_Blocks[i]);
11798 VkResult VmaBlockVector::CreateMinBlocks()
11800 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11802 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11803 if(res != VK_SUCCESS)
11811 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11813 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11815 const size_t blockCount = m_Blocks.size();
11824 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11826 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11827 VMA_ASSERT(pBlock);
11828 VMA_HEAVY_ASSERT(pBlock->Validate());
11829 pBlock->m_pMetadata->AddPoolStats(*pStats);
11833 bool VmaBlockVector::IsEmpty()
11835 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11836 return m_Blocks.empty();
11839 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
11841 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11842 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11843 (VMA_DEBUG_MARGIN > 0) &&
11845 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11848 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
11850 VkResult VmaBlockVector::Allocate(
11851 uint32_t currentFrameIndex,
11853 VkDeviceSize alignment,
11855 VmaSuballocationType suballocType,
11856 size_t allocationCount,
11860 VkResult res = VK_SUCCESS;
11862 if(IsCorruptionDetectionEnabled())
11864 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11865 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11869 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11870 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11872 res = AllocatePage(
11878 pAllocations + allocIndex);
11879 if(res != VK_SUCCESS)
11886 if(res != VK_SUCCESS)
11889 while(allocIndex--)
11891 Free(pAllocations[allocIndex]);
11893 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
11899 VkResult VmaBlockVector::AllocatePage(
11900 uint32_t currentFrameIndex,
11902 VkDeviceSize alignment,
11904 VmaSuballocationType suballocType,
11913 VkDeviceSize freeMemory;
11915 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
11917 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
11921 const bool canFallbackToDedicated = !IsCustomPool();
11922 const bool canCreateNewBlock =
11924 (m_Blocks.size() < m_MaxBlockCount) &&
11925 (freeMemory >= size || !canFallbackToDedicated);
11932 canMakeOtherLost =
false;
11936 if(isUpperAddress &&
11939 return VK_ERROR_FEATURE_NOT_PRESENT;
11953 return VK_ERROR_FEATURE_NOT_PRESENT;
11957 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11959 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11967 if(!canMakeOtherLost || canCreateNewBlock)
11976 if(!m_Blocks.empty())
11978 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11979 VMA_ASSERT(pCurrBlock);
11980 VkResult res = AllocateFromBlock(
11990 if(res == VK_SUCCESS)
11992 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
12002 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
12004 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12005 VMA_ASSERT(pCurrBlock);
12006 VkResult res = AllocateFromBlock(
12016 if(res == VK_SUCCESS)
12018 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12026 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12028 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12029 VMA_ASSERT(pCurrBlock);
12030 VkResult res = AllocateFromBlock(
12040 if(res == VK_SUCCESS)
12042 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
12050 if(canCreateNewBlock)
12053 VkDeviceSize newBlockSize = m_PreferredBlockSize;
12054 uint32_t newBlockSizeShift = 0;
12055 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
12057 if(!m_ExplicitBlockSize)
12060 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
12061 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
12063 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12064 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
12066 newBlockSize = smallerNewBlockSize;
12067 ++newBlockSizeShift;
12076 size_t newBlockIndex = 0;
12077 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12078 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
12080 if(!m_ExplicitBlockSize)
12082 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
12084 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12085 if(smallerNewBlockSize >= size)
12087 newBlockSize = smallerNewBlockSize;
12088 ++newBlockSizeShift;
12089 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12090 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
12099 if(res == VK_SUCCESS)
12101 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
12102 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
12104 res = AllocateFromBlock(
12114 if(res == VK_SUCCESS)
12116 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
12122 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12129 if(canMakeOtherLost)
12131 uint32_t tryIndex = 0;
12132 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
12134 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
12135 VmaAllocationRequest bestRequest = {};
12136 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
12142 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
12144 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12145 VMA_ASSERT(pCurrBlock);
12146 VmaAllocationRequest currRequest = {};
12147 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
12150 m_BufferImageGranularity,
12159 const VkDeviceSize currRequestCost = currRequest.CalcCost();
12160 if(pBestRequestBlock == VMA_NULL ||
12161 currRequestCost < bestRequestCost)
12163 pBestRequestBlock = pCurrBlock;
12164 bestRequest = currRequest;
12165 bestRequestCost = currRequestCost;
12167 if(bestRequestCost == 0)
12178 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12180 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12181 VMA_ASSERT(pCurrBlock);
12182 VmaAllocationRequest currRequest = {};
12183 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
12186 m_BufferImageGranularity,
12195 const VkDeviceSize currRequestCost = currRequest.CalcCost();
12196 if(pBestRequestBlock == VMA_NULL ||
12197 currRequestCost < bestRequestCost ||
12200 pBestRequestBlock = pCurrBlock;
12201 bestRequest = currRequest;
12202 bestRequestCost = currRequestCost;
12204 if(bestRequestCost == 0 ||
12214 if(pBestRequestBlock != VMA_NULL)
12218 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
12219 if(res != VK_SUCCESS)
12225 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
12231 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12232 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12233 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
12234 UpdateHasEmptyBlock();
12235 (*pAllocation)->InitBlockAllocation(
12237 bestRequest.offset,
12244 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
12245 VMA_DEBUG_LOG(
" Returned from existing block");
12246 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
12247 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
12248 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12250 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12252 if(IsCorruptionDetectionEnabled())
12254 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
12255 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12270 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
12272 return VK_ERROR_TOO_MANY_OBJECTS;
12276 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12279 void VmaBlockVector::Free(
12282 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
12284 bool budgetExceeded =
false;
12286 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12288 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
12289 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
12294 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12296 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12298 if(IsCorruptionDetectionEnabled())
12300 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
12301 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
12304 if(hAllocation->IsPersistentMap())
12306 pBlock->Unmap(m_hAllocator, 1);
12309 pBlock->m_pMetadata->Free(hAllocation);
12310 VMA_HEAVY_ASSERT(pBlock->Validate());
12312 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
12314 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
12316 if(pBlock->m_pMetadata->IsEmpty())
12319 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
12321 pBlockToDelete = pBlock;
12328 else if(m_HasEmptyBlock && canDeleteBlock)
12330 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
12331 if(pLastBlock->m_pMetadata->IsEmpty())
12333 pBlockToDelete = pLastBlock;
12334 m_Blocks.pop_back();
12338 UpdateHasEmptyBlock();
12339 IncrementallySortBlocks();
12344 if(pBlockToDelete != VMA_NULL)
12346 VMA_DEBUG_LOG(
" Deleted empty block");
12347 pBlockToDelete->Destroy(m_hAllocator);
12348 vma_delete(m_hAllocator, pBlockToDelete);
12352 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
12354 VkDeviceSize result = 0;
12355 for(
size_t i = m_Blocks.size(); i--; )
12357 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12358 if(result >= m_PreferredBlockSize)
12366 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12368 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12370 if(m_Blocks[blockIndex] == pBlock)
12372 VmaVectorRemove(m_Blocks, blockIndex);
12379 void VmaBlockVector::IncrementallySortBlocks()
12384 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12386 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12388 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
12395 VkResult VmaBlockVector::AllocateFromBlock(
12396 VmaDeviceMemoryBlock* pBlock,
12397 uint32_t currentFrameIndex,
12399 VkDeviceSize alignment,
12402 VmaSuballocationType suballocType,
12411 VmaAllocationRequest currRequest = {};
12412 if(pBlock->m_pMetadata->CreateAllocationRequest(
12415 m_BufferImageGranularity,
12425 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12429 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12430 if(res != VK_SUCCESS)
12436 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12437 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12438 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12439 UpdateHasEmptyBlock();
12440 (*pAllocation)->InitBlockAllocation(
12442 currRequest.offset,
12449 VMA_HEAVY_ASSERT(pBlock->Validate());
12450 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12451 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
12452 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12454 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12456 if(IsCorruptionDetectionEnabled())
12458 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12459 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12463 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12466 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12468 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12469 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12470 allocInfo.allocationSize = blockSize;
12471 VkDeviceMemory mem = VK_NULL_HANDLE;
12472 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12481 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12487 allocInfo.allocationSize,
12491 m_Blocks.push_back(pBlock);
12492 if(pNewBlockIndex != VMA_NULL)
12494 *pNewBlockIndex = m_Blocks.size() - 1;
12500 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12501 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12502 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12504 const size_t blockCount = m_Blocks.size();
12505 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12509 BLOCK_FLAG_USED = 0x00000001,
12510 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12518 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12519 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12520 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
12523 const size_t moveCount = moves.size();
12524 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12526 const VmaDefragmentationMove& move = moves[moveIndex];
12527 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12528 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12531 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12534 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12536 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12537 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12538 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12540 currBlockInfo.pMappedData = pBlock->GetMappedData();
12542 if(currBlockInfo.pMappedData == VMA_NULL)
12544 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12545 if(pDefragCtx->res == VK_SUCCESS)
12547 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
12554 if(pDefragCtx->res == VK_SUCCESS)
12556 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12557 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12559 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12561 const VmaDefragmentationMove& move = moves[moveIndex];
12563 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12564 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12566 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
12571 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12572 memRange.memory = pSrcBlock->GetDeviceMemory();
12573 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12574 memRange.size = VMA_MIN(
12575 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12576 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12577 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12582 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12583 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12584 static_cast<size_t>(move.size));
12586 if(IsCorruptionDetectionEnabled())
12588 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12589 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
12595 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12596 memRange.memory = pDstBlock->GetDeviceMemory();
12597 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12598 memRange.size = VMA_MIN(
12599 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12600 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12601 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12608 for(
size_t blockIndex = blockCount; blockIndex--; )
12610 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12611 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12613 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12614 pBlock->Unmap(m_hAllocator, 1);
12619 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12620 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12621 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12622 VkCommandBuffer commandBuffer)
12624 const size_t blockCount = m_Blocks.size();
12626 pDefragCtx->blockContexts.resize(blockCount);
12627 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
12630 const size_t moveCount = moves.size();
12631 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12633 const VmaDefragmentationMove& move = moves[moveIndex];
12634 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12635 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12638 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12642 VkBufferCreateInfo bufCreateInfo;
12643 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12645 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12647 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12648 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12649 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12651 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12652 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12653 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12654 if(pDefragCtx->res == VK_SUCCESS)
12656 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12657 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
12664 if(pDefragCtx->res == VK_SUCCESS)
12666 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12668 const VmaDefragmentationMove& move = moves[moveIndex];
12670 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12671 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12673 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12675 VkBufferCopy region = {
12679 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12680 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
12685 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12687 pDefragCtx->res = VK_NOT_READY;
12693 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12695 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12696 if(pBlock->m_pMetadata->IsEmpty())
12698 if(m_Blocks.size() > m_MinBlockCount)
12700 if(pDefragmentationStats != VMA_NULL)
12703 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12706 VmaVectorRemove(m_Blocks, blockIndex);
12707 pBlock->Destroy(m_hAllocator);
12708 vma_delete(m_hAllocator, pBlock);
12716 UpdateHasEmptyBlock();
12719 void VmaBlockVector::UpdateHasEmptyBlock()
12721 m_HasEmptyBlock =
false;
12722 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
12724 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
12725 if(pBlock->m_pMetadata->IsEmpty())
12727 m_HasEmptyBlock =
true;
12733 #if VMA_STATS_STRING_ENABLED
12735 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12737 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12739 json.BeginObject();
12743 const char* poolName = m_hParentPool->GetName();
12744 if(poolName != VMA_NULL && poolName[0] !=
'\0')
12746 json.WriteString(
"Name");
12747 json.WriteString(poolName);
12750 json.WriteString(
"MemoryTypeIndex");
12751 json.WriteNumber(m_MemoryTypeIndex);
12753 json.WriteString(
"BlockSize");
12754 json.WriteNumber(m_PreferredBlockSize);
12756 json.WriteString(
"BlockCount");
12757 json.BeginObject(
true);
12758 if(m_MinBlockCount > 0)
12760 json.WriteString(
"Min");
12761 json.WriteNumber((uint64_t)m_MinBlockCount);
12763 if(m_MaxBlockCount < SIZE_MAX)
12765 json.WriteString(
"Max");
12766 json.WriteNumber((uint64_t)m_MaxBlockCount);
12768 json.WriteString(
"Cur");
12769 json.WriteNumber((uint64_t)m_Blocks.size());
12772 if(m_FrameInUseCount > 0)
12774 json.WriteString(
"FrameInUseCount");
12775 json.WriteNumber(m_FrameInUseCount);
12778 if(m_Algorithm != 0)
12780 json.WriteString(
"Algorithm");
12781 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12786 json.WriteString(
"PreferredBlockSize");
12787 json.WriteNumber(m_PreferredBlockSize);
12790 json.WriteString(
"Blocks");
12791 json.BeginObject();
12792 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12794 json.BeginString();
12795 json.ContinueString(m_Blocks[i]->GetId());
12798 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
12805 #endif // #if VMA_STATS_STRING_ENABLED
12807 void VmaBlockVector::Defragment(
12808 class VmaBlockVectorDefragmentationContext* pCtx,
12810 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12811 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12812 VkCommandBuffer commandBuffer)
12814 pCtx->res = VK_SUCCESS;
12816 const VkMemoryPropertyFlags memPropFlags =
12817 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12818 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12820 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12822 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12823 !IsCorruptionDetectionEnabled() &&
12824 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12827 if(canDefragmentOnCpu || canDefragmentOnGpu)
12829 bool defragmentOnGpu;
12831 if(canDefragmentOnGpu != canDefragmentOnCpu)
12833 defragmentOnGpu = canDefragmentOnGpu;
12838 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12839 m_hAllocator->IsIntegratedGpu();
12842 bool overlappingMoveSupported = !defragmentOnGpu;
12844 if(m_hAllocator->m_UseMutex)
12846 m_Mutex.LockWrite();
12847 pCtx->mutexLocked =
true;
12850 pCtx->Begin(overlappingMoveSupported);
12854 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12855 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12856 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12857 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12858 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
12861 if(pStats != VMA_NULL)
12863 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12864 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12867 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12868 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12869 if(defragmentOnGpu)
12871 maxGpuBytesToMove -= bytesMoved;
12872 maxGpuAllocationsToMove -= allocationsMoved;
12876 maxCpuBytesToMove -= bytesMoved;
12877 maxCpuAllocationsToMove -= allocationsMoved;
12881 if(pCtx->res >= VK_SUCCESS)
12883 if(defragmentOnGpu)
12885 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12889 ApplyDefragmentationMovesCpu(pCtx, moves);
12895 void VmaBlockVector::DefragmentationEnd(
12896 class VmaBlockVectorDefragmentationContext* pCtx,
12900 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12902 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12903 if(blockCtx.hBuffer)
12905 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12906 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12910 if(pCtx->res >= VK_SUCCESS)
12912 FreeEmptyBlocks(pStats);
12915 if(pCtx->mutexLocked)
12917 VMA_ASSERT(m_hAllocator->m_UseMutex);
12918 m_Mutex.UnlockWrite();
12922 size_t VmaBlockVector::CalcAllocationCount()
const
12925 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12927 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12932 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
12934 if(m_BufferImageGranularity == 1)
12938 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12939 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12941 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12942 VMA_ASSERT(m_Algorithm == 0);
12943 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12944 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12952 void VmaBlockVector::MakePoolAllocationsLost(
12953 uint32_t currentFrameIndex,
12954 size_t* pLostAllocationCount)
12956 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12957 size_t lostAllocationCount = 0;
12958 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12960 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12961 VMA_ASSERT(pBlock);
12962 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12964 if(pLostAllocationCount != VMA_NULL)
12966 *pLostAllocationCount = lostAllocationCount;
12970 VkResult VmaBlockVector::CheckCorruption()
12972 if(!IsCorruptionDetectionEnabled())
12974 return VK_ERROR_FEATURE_NOT_PRESENT;
12977 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12978 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12980 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12981 VMA_ASSERT(pBlock);
12982 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12983 if(res != VK_SUCCESS)
12991 void VmaBlockVector::AddStats(
VmaStats* pStats)
12993 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12994 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12996 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12998 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
13000 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
13001 VMA_ASSERT(pBlock);
13002 VMA_HEAVY_ASSERT(pBlock->Validate());
13004 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
13005 VmaAddStatInfo(pStats->
total, allocationStatInfo);
13006 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
13007 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
13014 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
13016 VmaBlockVector* pBlockVector,
13017 uint32_t currentFrameIndex,
13018 bool overlappingMoveSupported) :
13019 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
13020 m_AllocationCount(0),
13021 m_AllAllocations(false),
13023 m_AllocationsMoved(0),
13024 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
13027 const size_t blockCount = m_pBlockVector->m_Blocks.size();
13028 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13030 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
13031 pBlockInfo->m_OriginalBlockIndex = blockIndex;
13032 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
13033 m_Blocks.push_back(pBlockInfo);
13037 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
13040 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
13042 for(
size_t i = m_Blocks.size(); i--; )
13044 vma_delete(m_hAllocator, m_Blocks[i]);
13048 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13051 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
13053 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
13054 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
13055 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
13057 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
13058 (*it)->m_Allocations.push_back(allocInfo);
13065 ++m_AllocationCount;
13069 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
13070 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13071 VkDeviceSize maxBytesToMove,
13072 uint32_t maxAllocationsToMove)
13074 if(m_Blocks.empty())
13087 size_t srcBlockMinIndex = 0;
13100 size_t srcBlockIndex = m_Blocks.size() - 1;
13101 size_t srcAllocIndex = SIZE_MAX;
13107 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
13109 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
13112 if(srcBlockIndex == srcBlockMinIndex)
13119 srcAllocIndex = SIZE_MAX;
13124 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
13128 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
13129 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
13131 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
13132 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
13133 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
13134 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
13137 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
13139 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
13140 VmaAllocationRequest dstAllocRequest;
13141 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
13142 m_CurrentFrameIndex,
13143 m_pBlockVector->GetFrameInUseCount(),
13144 m_pBlockVector->GetBufferImageGranularity(),
13151 &dstAllocRequest) &&
13153 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
13155 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
13158 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
13159 (m_BytesMoved + size > maxBytesToMove))
13164 VmaDefragmentationMove move;
13165 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
13166 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
13167 move.srcOffset = srcOffset;
13168 move.dstOffset = dstAllocRequest.offset;
13170 moves.push_back(move);
13172 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
13176 allocInfo.m_hAllocation);
13177 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
13179 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
13181 if(allocInfo.m_pChanged != VMA_NULL)
13183 *allocInfo.m_pChanged = VK_TRUE;
13186 ++m_AllocationsMoved;
13187 m_BytesMoved += size;
13189 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
13197 if(srcAllocIndex > 0)
13203 if(srcBlockIndex > 0)
13206 srcAllocIndex = SIZE_MAX;
13216 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
13219 for(
size_t i = 0; i < m_Blocks.size(); ++i)
13221 if(m_Blocks[i]->m_HasNonMovableAllocations)
13229 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
13230 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13231 VkDeviceSize maxBytesToMove,
13232 uint32_t maxAllocationsToMove)
13234 if(!m_AllAllocations && m_AllocationCount == 0)
13239 const size_t blockCount = m_Blocks.size();
13240 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13242 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
13244 if(m_AllAllocations)
13246 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
13247 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
13248 it != pMetadata->m_Suballocations.end();
13251 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
13253 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
13254 pBlockInfo->m_Allocations.push_back(allocInfo);
13259 pBlockInfo->CalcHasNonMovableAllocations();
13263 pBlockInfo->SortAllocationsByOffsetDescending();
13269 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
13272 const uint32_t roundCount = 2;
13275 VkResult result = VK_SUCCESS;
13276 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
13278 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
13284 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
13285 size_t dstBlockIndex, VkDeviceSize dstOffset,
13286 size_t srcBlockIndex, VkDeviceSize srcOffset)
13288 if(dstBlockIndex < srcBlockIndex)
13292 if(dstBlockIndex > srcBlockIndex)
13296 if(dstOffset < srcOffset)
13306 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
13308 VmaBlockVector* pBlockVector,
13309 uint32_t currentFrameIndex,
13310 bool overlappingMoveSupported) :
13311 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
13312 m_OverlappingMoveSupported(overlappingMoveSupported),
13313 m_AllocationCount(0),
13314 m_AllAllocations(false),
13316 m_AllocationsMoved(0),
13317 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
13319 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
13323 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
13327 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
13328 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13329 VkDeviceSize maxBytesToMove,
13330 uint32_t maxAllocationsToMove)
13332 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
13334 const size_t blockCount = m_pBlockVector->GetBlockCount();
13335 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
13340 PreprocessMetadata();
13344 m_BlockInfos.resize(blockCount);
13345 for(
size_t i = 0; i < blockCount; ++i)
13347 m_BlockInfos[i].origBlockIndex = i;
13350 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
13351 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
13352 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
13357 FreeSpaceDatabase freeSpaceDb;
13359 size_t dstBlockInfoIndex = 0;
13360 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13361 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13362 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13363 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
13364 VkDeviceSize dstOffset = 0;
13367 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
13369 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
13370 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
13371 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
13372 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
13373 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
13375 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
13376 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
13377 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
13378 if(m_AllocationsMoved == maxAllocationsToMove ||
13379 m_BytesMoved + srcAllocSize > maxBytesToMove)
13384 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
13387 size_t freeSpaceInfoIndex;
13388 VkDeviceSize dstAllocOffset;
13389 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13390 freeSpaceInfoIndex, dstAllocOffset))
13392 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13393 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13394 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
13397 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13399 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13403 VmaSuballocation suballoc = *srcSuballocIt;
13404 suballoc.offset = dstAllocOffset;
13405 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13406 m_BytesMoved += srcAllocSize;
13407 ++m_AllocationsMoved;
13409 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13411 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13412 srcSuballocIt = nextSuballocIt;
13414 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13416 VmaDefragmentationMove move = {
13417 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13418 srcAllocOffset, dstAllocOffset,
13420 moves.push_back(move);
13427 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13429 VmaSuballocation suballoc = *srcSuballocIt;
13430 suballoc.offset = dstAllocOffset;
13431 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13432 m_BytesMoved += srcAllocSize;
13433 ++m_AllocationsMoved;
13435 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13437 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13438 srcSuballocIt = nextSuballocIt;
13440 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13442 VmaDefragmentationMove move = {
13443 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13444 srcAllocOffset, dstAllocOffset,
13446 moves.push_back(move);
13451 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
13454 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13455 dstAllocOffset + srcAllocSize > dstBlockSize)
13458 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13460 ++dstBlockInfoIndex;
13461 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13462 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13463 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13464 dstBlockSize = pDstMetadata->GetSize();
13466 dstAllocOffset = 0;
13470 if(dstBlockInfoIndex == srcBlockInfoIndex)
13472 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13474 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13476 bool skipOver = overlap;
13477 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
13481 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13486 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13488 dstOffset = srcAllocOffset + srcAllocSize;
13494 srcSuballocIt->offset = dstAllocOffset;
13495 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13496 dstOffset = dstAllocOffset + srcAllocSize;
13497 m_BytesMoved += srcAllocSize;
13498 ++m_AllocationsMoved;
13500 VmaDefragmentationMove move = {
13501 srcOrigBlockIndex, dstOrigBlockIndex,
13502 srcAllocOffset, dstAllocOffset,
13504 moves.push_back(move);
13512 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13513 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13515 VmaSuballocation suballoc = *srcSuballocIt;
13516 suballoc.offset = dstAllocOffset;
13517 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13518 dstOffset = dstAllocOffset + srcAllocSize;
13519 m_BytesMoved += srcAllocSize;
13520 ++m_AllocationsMoved;
13522 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13524 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13525 srcSuballocIt = nextSuballocIt;
13527 pDstMetadata->m_Suballocations.push_back(suballoc);
13529 VmaDefragmentationMove move = {
13530 srcOrigBlockIndex, dstOrigBlockIndex,
13531 srcAllocOffset, dstAllocOffset,
13533 moves.push_back(move);
13539 m_BlockInfos.clear();
13541 PostprocessMetadata();
13546 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13548 const size_t blockCount = m_pBlockVector->GetBlockCount();
13549 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13551 VmaBlockMetadata_Generic*
const pMetadata =
13552 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13553 pMetadata->m_FreeCount = 0;
13554 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13555 pMetadata->m_FreeSuballocationsBySize.clear();
13556 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13557 it != pMetadata->m_Suballocations.end(); )
13559 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13561 VmaSuballocationList::iterator nextIt = it;
13563 pMetadata->m_Suballocations.erase(it);
13574 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13576 const size_t blockCount = m_pBlockVector->GetBlockCount();
13577 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13579 VmaBlockMetadata_Generic*
const pMetadata =
13580 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13581 const VkDeviceSize blockSize = pMetadata->GetSize();
13584 if(pMetadata->m_Suballocations.empty())
13586 pMetadata->m_FreeCount = 1;
13588 VmaSuballocation suballoc = {
13592 VMA_SUBALLOCATION_TYPE_FREE };
13593 pMetadata->m_Suballocations.push_back(suballoc);
13594 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
13599 VkDeviceSize offset = 0;
13600 VmaSuballocationList::iterator it;
13601 for(it = pMetadata->m_Suballocations.begin();
13602 it != pMetadata->m_Suballocations.end();
13605 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13606 VMA_ASSERT(it->offset >= offset);
13609 if(it->offset > offset)
13611 ++pMetadata->m_FreeCount;
13612 const VkDeviceSize freeSize = it->offset - offset;
13613 VmaSuballocation suballoc = {
13617 VMA_SUBALLOCATION_TYPE_FREE };
13618 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13619 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13621 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13625 pMetadata->m_SumFreeSize -= it->size;
13626 offset = it->offset + it->size;
13630 if(offset < blockSize)
13632 ++pMetadata->m_FreeCount;
13633 const VkDeviceSize freeSize = blockSize - offset;
13634 VmaSuballocation suballoc = {
13638 VMA_SUBALLOCATION_TYPE_FREE };
13639 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13640 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13641 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13643 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13648 pMetadata->m_FreeSuballocationsBySize.begin(),
13649 pMetadata->m_FreeSuballocationsBySize.end(),
13650 VmaSuballocationItemSizeLess());
13653 VMA_HEAVY_ASSERT(pMetadata->Validate());
13657 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13660 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13661 while(it != pMetadata->m_Suballocations.end())
13663 if(it->offset < suballoc.offset)
13668 pMetadata->m_Suballocations.insert(it, suballoc);
13674 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13677 VmaBlockVector* pBlockVector,
13678 uint32_t currFrameIndex) :
13680 mutexLocked(false),
13681 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13682 m_hAllocator(hAllocator),
13683 m_hCustomPool(hCustomPool),
13684 m_pBlockVector(pBlockVector),
13685 m_CurrFrameIndex(currFrameIndex),
13686 m_pAlgorithm(VMA_NULL),
13687 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13688 m_AllAllocations(false)
13692 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13694 vma_delete(m_hAllocator, m_pAlgorithm);
13697 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13699 AllocInfo info = { hAlloc, pChanged };
13700 m_Allocations.push_back(info);
13703 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13705 const bool allAllocations = m_AllAllocations ||
13706 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13718 if(VMA_DEBUG_MARGIN == 0 &&
13720 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13722 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13723 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13727 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13728 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13733 m_pAlgorithm->AddAll();
13737 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13739 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
13747 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13749 uint32_t currFrameIndex,
13752 m_hAllocator(hAllocator),
13753 m_CurrFrameIndex(currFrameIndex),
13756 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13758 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13761 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13763 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13765 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13766 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13767 vma_delete(m_hAllocator, pBlockVectorCtx);
13769 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13771 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13772 if(pBlockVectorCtx)
13774 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13775 vma_delete(m_hAllocator, pBlockVectorCtx);
13780 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13782 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13784 VmaPool pool = pPools[poolIndex];
13787 if(pool->m_BlockVector.GetAlgorithm() == 0)
13789 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13791 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13793 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13795 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13800 if(!pBlockVectorDefragCtx)
13802 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13805 &pool->m_BlockVector,
13807 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13810 pBlockVectorDefragCtx->AddAll();
13815 void VmaDefragmentationContext_T::AddAllocations(
13816 uint32_t allocationCount,
13818 VkBool32* pAllocationsChanged)
13821 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13824 VMA_ASSERT(hAlloc);
13826 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13828 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13830 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13832 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
13834 if(hAllocPool != VK_NULL_HANDLE)
13837 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13839 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13841 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13843 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13847 if(!pBlockVectorDefragCtx)
13849 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13852 &hAllocPool->m_BlockVector,
13854 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13861 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13862 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13863 if(!pBlockVectorDefragCtx)
13865 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13868 m_hAllocator->m_pBlockVectors[memTypeIndex],
13870 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13874 if(pBlockVectorDefragCtx)
13876 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13877 &pAllocationsChanged[allocIndex] : VMA_NULL;
13878 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
13884 VkResult VmaDefragmentationContext_T::Defragment(
13885 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13886 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
13894 if(commandBuffer == VK_NULL_HANDLE)
13896 maxGpuBytesToMove = 0;
13897 maxGpuAllocationsToMove = 0;
13900 VkResult res = VK_SUCCESS;
13903 for(uint32_t memTypeIndex = 0;
13904 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13907 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13908 if(pBlockVectorCtx)
13910 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13911 pBlockVectorCtx->GetBlockVector()->Defragment(
13914 maxCpuBytesToMove, maxCpuAllocationsToMove,
13915 maxGpuBytesToMove, maxGpuAllocationsToMove,
13917 if(pBlockVectorCtx->res != VK_SUCCESS)
13919 res = pBlockVectorCtx->res;
13925 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13926 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13929 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13930 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13931 pBlockVectorCtx->GetBlockVector()->Defragment(
13934 maxCpuBytesToMove, maxCpuAllocationsToMove,
13935 maxGpuBytesToMove, maxGpuAllocationsToMove,
13937 if(pBlockVectorCtx->res != VK_SUCCESS)
13939 res = pBlockVectorCtx->res;
13949 #if VMA_RECORDING_ENABLED
13951 VmaRecorder::VmaRecorder() :
13956 m_StartCounter(INT64_MAX)
13962 m_UseMutex = useMutex;
13963 m_Flags = settings.
flags;
13965 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13966 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13969 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13972 return VK_ERROR_INITIALIZATION_FAILED;
13976 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13977 fprintf(m_File,
"%s\n",
"1,8");
13982 VmaRecorder::~VmaRecorder()
13984 if(m_File != VMA_NULL)
13990 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13992 CallParams callParams;
13993 GetBasicParams(callParams);
13995 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13996 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
14000 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
14002 CallParams callParams;
14003 GetBasicParams(callParams);
14005 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14006 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
14012 CallParams callParams;
14013 GetBasicParams(callParams);
14015 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14016 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
14027 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
14029 CallParams callParams;
14030 GetBasicParams(callParams);
14032 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14033 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
14038 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
14039 const VkMemoryRequirements& vkMemReq,
14043 CallParams callParams;
14044 GetBasicParams(callParams);
14046 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14047 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
14048 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14050 vkMemReq.alignment,
14051 vkMemReq.memoryTypeBits,
14059 userDataStr.GetString());
14063 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
14064 const VkMemoryRequirements& vkMemReq,
14066 uint64_t allocationCount,
14069 CallParams callParams;
14070 GetBasicParams(callParams);
14072 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14073 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
14074 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
14076 vkMemReq.alignment,
14077 vkMemReq.memoryTypeBits,
14084 PrintPointerList(allocationCount, pAllocations);
14085 fprintf(m_File,
",%s\n", userDataStr.GetString());
14089 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
14090 const VkMemoryRequirements& vkMemReq,
14091 bool requiresDedicatedAllocation,
14092 bool prefersDedicatedAllocation,
14096 CallParams callParams;
14097 GetBasicParams(callParams);
14099 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14100 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
14101 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14103 vkMemReq.alignment,
14104 vkMemReq.memoryTypeBits,
14105 requiresDedicatedAllocation ? 1 : 0,
14106 prefersDedicatedAllocation ? 1 : 0,
14114 userDataStr.GetString());
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags, createInfo.usage, createInfo.requiredFlags,
        createInfo.preferredFlags, createInfo.memoryTypeBits, createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}
void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordDestroyImage(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex, VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex, allocation);
    Flush();
}
void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex, pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, ctx);
    Flush();
}
void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex, VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex, ctx);
    Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
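/*
Example (a minimal sketch): recording is requested through
VmaAllocatorCreateInfo::pRecordSettings; the output path below is hypothetical.
Each recorded call becomes one CSV line in the format written above.

\code
VmaRecordSettings recordSettings = {};
recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT; // Flush after every call - useful when hunting a crash.
recordSettings.pFilePath = "vma_capture.csv"; // Hypothetical output path.

VmaAllocatorCreateInfo allocatorInfo = {};
// ... fill physicalDevice, device, instance ...
allocatorInfo.pRecordSettings = &recordSettings;
\endcode
*/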
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

VmaAllocation VmaAllocationObjectAllocator::Allocate()
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc();
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));
    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks = *pCreateInfo->pDeviceMemoryCallbacks;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
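/*
Example: typical allocator creation through the public interface, assuming
valid physicalDevice, device, and instance handles obtained elsewhere. A sketch,
not the only valid setup.

\code
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.vulkanApiVersion = VK_API_VERSION_1_1; // Optional; 0 means Vulkan 1.0.
allocatorInfo.physicalDevice = physicalDevice;
allocatorInfo.device = device;
allocatorInfo.instance = instance;

VmaAllocator allocator;
VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
\endcode
*/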
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL

    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
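/*
Example: when VMA_STATIC_VULKAN_FUNCTIONS is 0 (e.g. when using a loader such
as volk), the application can satisfy the asserts above by supplying its own
pointers. A sketch - only a few of the entry points are shown.

\code
VmaVulkanFunctions vulkanFunctions = {};
vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
vulkanFunctions.vkFreeMemory = vkFreeMemory;
// ... and so on for every member of VmaVulkanFunctions ...

VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.pVulkanFunctions = &vulkanFunctions;
\endcode
*/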
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
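/*
Worked example with hypothetical numbers: if VMA_SMALL_HEAP_MAX_SIZE is 1 GiB,
a 512 MiB heap counts as "small", so its preferred block size becomes
512 MiB / 8 = 64 MiB (already a multiple of 32 bytes, so VmaAlignUp is a no-op).
A larger heap instead uses m_PreferredLargeHeapBlockSize, i.e.
VmaAllocatorCreateInfo::preferredLargeHeapBlockSize or the library default.
*/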
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedImage, allocationCount, pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size, alignment, finalCreateInfo, suballocType,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed: try dedicated memory.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        res = AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedImage, allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            // Succeeded: AllocateDedicatedMemory already filled pAllocations.
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        // Everything failed: return error code.
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex, allocInfo, map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // No need for vkUnmapMemory: the spec allows freeing memory that is still mapped.
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            currAlloc->Dtor();
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size, alignmentForPool, createInfoForPool,
            suballocType, allocationCount, pAllocations);
    }
    else
    {
        // Bit mask of memory Vulkan types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size, alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer, dedicatedImage, createInfo, memTypeIndex,
                suballocType, allocationCount, pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size, alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer, dedicatedImage, createInfo, memTypeIndex,
                        suballocType, allocationCount, pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: allocation from this memory type failed - try the next one in the next iteration.
                }
                else
                {
                    // No other matching memory type index could be found.
                    // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
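/*
Example: the same memory-type search is available to user code through
vmaFindMemoryTypeIndex. A sketch; memoryTypeBits would come from
vkGetBufferMemoryRequirements or a similar query.

\code
VmaAllocationCreateInfo allocInfo = {};
allocInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

uint32_t memTypeIndex;
VkResult res = vmaFindMemoryTypeIndex(allocator, memoryTypeBits, &allocInfo, &memTypeIndex);
// memTypeIndex is the type AllocateMemory above would try first; on failure it
// masks that type out of memoryTypeBits and repeats the search, as in the loop just shown.
\endcode
*/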
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation became lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            allocation->Dtor();
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It is kept for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
        }
    }
}
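/*
Example: querying the budget from user code through the public vmaGetBudget
entry point defined near the end of this file. A sketch.

\code
VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
vmaGetBudget(allocator, budgets);

const VkPhysicalDeviceMemoryProperties* memProps = nullptr;
vmaGetMemoryProperties(allocator, &memProps);
for(uint32_t heapIndex = 0; heapIndex < memProps->memoryHeapCount; ++heapIndex)
{
    printf("Heap %u: usage %llu / budget %llu\n", heapIndex,
        (unsigned long long)budgets[heapIndex].usage,
        (unsigned long long)budgets[heapIndex].budget);
}
\endcode
*/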
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}
VkResult VmaAllocator_T::DefragmentationEnd(VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}
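/*
Example: calling vmaSetCurrentFrameIndex once per frame both advances the
frame index used for lost-allocation bookkeeping and, with VK_EXT_memory_budget
enabled, refreshes the cached budget. A sketch.

\code
// Once per frame, e.g. right after acquiring the next swapchain image:
vmaSetCurrentFrameIndex(allocator, ++frameIndex);
\endcode
*/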
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}
VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
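/*
Example: the public map/unmap pair built on the two functions above. A sketch
assuming srcData/srcSize describe host data to upload.

\code
void* mapped = nullptr;
if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
{
    memcpy(mapped, srcData, (size_t)srcSize);
    vmaUnmapMemory(allocator, allocation);
}
\endcode
*/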
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            {
                // 1. Still within this allocation.
                memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
                if(size == VK_WHOLE_SIZE)
                {
                    size = allocationSize - offset;
                }
                else
                {
                    VMA_ASSERT(offset + size <= allocationSize);
                }
                memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

                // 2. Adjust to whole block.
                const VkDeviceSize allocationOffset = hAllocation->GetOffset();
                VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
                const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
                memRange.offset += allocationOffset;
                memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);

                break;
            }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: just ignore this call.
}
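/*
Example: user code only needs the flush/invalidate pair for HOST_VISIBLE but
non-HOST_COHERENT memory; for coherent types the function above reduces to a
no-op. A sketch.

\code
// After writing through a mapped pointer:
vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
// Before reading data the device has written:
vmaInvalidateAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
\endcode
*/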
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();
    // No need for vkUnmapMemory: the spec allows freeing memory that is still mapped.
    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    memProps.pNext = &budgetProps;

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// Public interface
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
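// Illustrative usage sketch, not part of the library: a typical allocator
// lifetime. `physicalDevice`, `device`, and `instance` are assumed to be valid
// Vulkan handles created by the application beforehand.
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     allocatorInfo.physicalDevice = physicalDevice;
//     allocatorInfo.device = device;
//     allocatorInfo.instance = instance;
//
//     VmaAllocator allocator;
//     vmaCreateAllocator(&allocatorInfo, &allocator);
//     // ... create and destroy buffers, images, pools ...
//     vmaDestroyAllocator(allocator);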
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}
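// Usage sketch (illustrative): advancing the frame index once per rendered
// frame so that allocations which can become lost age correctly. `frameIndex`
// is assumed to be an application-side counter starting at 0.
//
//     ++frameIndex;
//     vmaSetCurrentFrameIndex(allocator, frameIndex);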
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
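// Illustrative sketch: dumping the allocator state as a JSON string, e.g. for
// logging or offline inspection. The returned string must be released with
// vmaFreeStatsString().
//
//     char* statsString = nullptr;
//     vmaBuildStatsString(allocator, &statsString, VK_TRUE /*detailedMap*/);
//     // ... write statsString to a file or log ...
//     vmaFreeStatsString(allocator, statsString);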
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this
                // memory type, plus number of notPreferredFlags bits that are present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
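// Usage sketch (illustrative): choosing a memory type, e.g. for a custom pool.
// The `memReq` variable is assumed to come from vkGetBufferMemoryRequirements
// or a sibling query on a representative resource.
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
//
//     uint32_t memTypeIndex;
//     VkResult res = vmaFindMemoryTypeIndex(
//         allocator, memReq.memoryTypeBits, &allocCreateInfo, &memTypeIndex);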
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
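// Usage sketch (illustrative): these two helpers create a temporary dummy
// buffer/image only to query its memory requirements, which makes them handy
// for picking VmaPoolCreateInfo::memoryTypeIndex before any real resource
// exists. `imgInfo` below is assumed to describe the images the pool will hold.
//
//     VkImageCreateInfo imgInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
//     // ... fill imgInfo as for the real images ...
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     uint32_t memTypeIndex;
//     vmaFindMemoryTypeIndexForImageInfo(
//         allocator, &imgInfo, &allocCreateInfo, &memTypeIndex);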
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
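// Usage sketch (illustrative): creating a custom pool from a previously found
// memory type index, then destroying it when no longer needed. `memTypeIndex`
// is assumed to come from one of the vmaFindMemoryTypeIndex* helpers above.
//
//     VmaPoolCreateInfo poolCreateInfo = {};
//     poolCreateInfo.memoryTypeIndex = memTypeIndex;
//     poolCreateInfo.blockSize = 64ull * 1024 * 1024; // optional; 0 = default
//
//     VmaPool pool;
//     VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
//     // ... allocate from it via VmaAllocationCreateInfo::pool ...
//     vmaDestroyPool(allocator, pool);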
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
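// Usage sketch (illustrative): allocating memory for requirements obtained
// from an existing buffer, then binding it manually. `device` and `buffer` are
// assumed to be valid application-side handles.
//
//     VkMemoryRequirements memReq;
//     vkGetBufferMemoryRequirements(device, buffer, &memReq);
//
//     VmaAllocationCreateInfo allocCreateInfo = {};
//     allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     VmaAllocation allocation;
//     VkResult res = vmaAllocateMemory(
//         allocator, &memReq, &allocCreateInfo, &allocation, nullptr);
//     if(res == VK_SUCCESS)
//     {
//         res = vmaBindBufferMemory(allocator, allocation, buffer);
//     }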
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
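// Usage sketch (illustrative): uploading data through a temporarily mapped
// allocation. `srcData` and `srcSize` are assumed application-side.
//
//     void* mappedData = nullptr;
//     if(vmaMapMemory(allocator, allocation, &mappedData) == VK_SUCCESS)
//     {
//         memcpy(mappedData, srcData, srcSize);
//         vmaUnmapMemory(allocator, allocation);
//     }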
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
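// Usage sketch (illustrative): for HOST_VISIBLE but non-HOST_COHERENT memory,
// flush after the CPU writes and invalidate before the CPU reads data the GPU
// wrote. VK_WHOLE_SIZE covers the entire allocation.
//
//     memcpy(mappedData, srcData, srcSize);
//     vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);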
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo *pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext *pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    // Degenerate case: Nothing to defragment.
    if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
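// Usage sketch (illustrative): CPU-side defragmentation of a set of existing
// allocations. After it completes, buffers/images bound to allocations that
// moved must be destroyed, recreated, and rebound by the application.
// `allocations` and `allocCount` are assumed application-side.
//
//     VmaDefragmentationInfo2 defragInfo = {};
//     defragInfo.allocationCount = (uint32_t)allocCount;
//     defragInfo.pAllocations = allocations;
//     defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
//     defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
//
//     VmaDefragmentationContext defragCtx;
//     vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
//     vmaDefragmentationEnd(allocator, defragCtx);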
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
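// Usage sketch (illustrative): the typical one-call path this function
// implements: create a buffer, allocate memory for it, and bind the two.
//
//     VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//     bufferInfo.size = 65536;
//     bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT |
//         VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//
//     VmaAllocationCreateInfo allocInfo = {};
//     allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     VkBuffer buffer;
//     VmaAllocation allocation;
//     vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);
//     // ... use the buffer ...
//     vmaDestroyBuffer(allocator, buffer, allocation);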
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
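// Usage sketch (illustrative): creating a sampled 2D texture together with its
// memory in one call.
//
//     VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
//     imageInfo.imageType = VK_IMAGE_TYPE_2D;
//     imageInfo.extent = { 1024, 1024, 1 };
//     imageInfo.mipLevels = 1;
//     imageInfo.arrayLayers = 1;
//     imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
//     imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
//     imageInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
//     imageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
//     imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
//
//     VmaAllocationCreateInfo allocInfo = {};
//     allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     VkImage image;
//     VmaAllocation allocation;
//     vmaCreateImage(allocator, &imageInfo, &allocInfo, &image, &allocation, nullptr);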
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION