#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && VK_KHR_get_physical_device_properties2
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
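/*
Sketch of how VMA_CALL_PRE / VMA_CALL_POST can be overridden before including
this header, e.g. to export the library's functions from a Windows DLL. The
macro names are real; the decoration below is just one possible configuration:

    #define VMA_CALL_PRE  __declspec(dllexport)
    #define VMA_CALL_POST __cdecl
    #include "vk_mem_alloc.h"
*/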
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator, const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags* pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator, uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator, char** ppStatsString, VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator, char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
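/*
Usage sketch (illustrative; local variable names are hypothetical): picking a
memory type for a CPU-side staging buffer via vmaFindMemoryTypeIndexForBufferInfo.

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 65536;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = 0;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufInfo, &allocInfo, &memTypeIndex);
    // On success, memTypeIndex can be used e.g. as VmaPoolCreateInfo::memoryTypeIndex.
*/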
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator, VmaPool pool, size_t* pLostAllocationCount);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator, size_t allocationCount, VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator, VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator, VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset, VkImage image, const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer, VmaAllocation* pAllocation, VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage, VmaAllocation* pAllocation, VmaAllocationInfo* pAllocationInfo);
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif
#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed /Zc:__cplusplus;
    // otherwise it stays at 199711L while _MSVC_LANG reports the real language version.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <algorithm>

#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr)         assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size) ))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr)   free(ptr)
    #endif
#endif
#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif
#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
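/*
A sketch of how VMA_DEBUG_LOG can be overridden before including the
implementation, e.g. to route the library's debug messages to stdout
(illustrative; formatting of the variadic arguments is up to the caller):

    #define VMA_DEBUG_LOG(format, ...) do { \
        printf(format, __VA_ARGS__); \
        printf("\n"); \
    } while(false)
*/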
#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI. Minimum supported client: Windows Vista.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /// Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /// Minimum alignment of all allocations, in bytes. Set to more than 1 for debugging purposes only; must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /// Minimum margin before and after every allocation, in bytes. Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    /// Define this macro to 1 to automatically fill new allocations and destroyed allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    /// Define this macro to 1 together with non-zero VMA_DEBUG_MARGIN to enable writing a magic value to the margin
    /// before and after every allocation and validating it, so memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /// Set this to 1 for debugging purposes only, to enable a single mutex protecting all entry calls to the library.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /// Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. Set to more than 1 for debugging purposes only; must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
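/*
How the SWAR popcount above works, on a concrete value (illustrative):
v = 0b1011 (11). The first step turns each 2-bit group into its own bit count:
0b1011 -> 0b0110 (groups 10 and 11 become counts 01 and 10). Each later step
adds adjacent 2-, 4-, 8- and 16-bit partial sums, giving the final answer 3,
the number of set bits. Equivalent to std::popcount(v) in C++20.
*/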
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
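/*
Quick check of the helpers above (illustrative): VmaAlignUp(11, 8) computes
(11 + 7) / 8 * 8 = 16, VmaAlignDown(11, 8) computes 11 / 8 * 8 = 8, and
VmaRoundDiv(10, 4) computes (10 + 2) / 4 = 3, i.e. division rounded to
nearest. These rely on integer division, so T should be an unsigned integer
type; align does not have to be a power of 2 here.
*/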
// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
// Returns true if two memory blocks occupy the same VkDeviceMemory "page".
// Assumes resource A ends at or before resource B starts, and pageSize is a power of 2.
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
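/*
Worked example (illustrative): with pageSize = 4096 (a power of 2),
~(pageSize - 1) masks an address down to its page base. For resource A at
offset 0 with size 100, the last byte is 99, whose page base is 0. Resource B
at offset 4000 also has page base 0, so the function returns true (same page);
at offset 4096 its page base is 4096, so it returns false.
*/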
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
// Returns true if given suballocation types could conflict and must respect
// VkPhysicalDeviceLimits::bufferImageGranularity. If a type is unknown, behaves conservatively.
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor, for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor, for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp). Cmp should return true if first
argument is less than second argument. Returned value is the found element, if
present in the collection, or the place where a new element with value (key)
should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
// Returns true if all pointers in the array are not-null and unique.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
            return false;
        for(uint32_t j = i + 1; j < count; ++j)
            if(iPtr == arr[j])
                return false;
    }
    return true;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
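/*
vma_new/vma_delete pair placement-new with explicit destructor calls so that
all internal objects go through the user-provided VkAllocationCallbacks
instead of global operator new/delete. A usage sketch (Foo is hypothetical):

    struct Foo { int x; };
    Foo* foo = vma_new(pAllocationCallbacks, Foo);  // allocate + construct
    vma_delete(pAllocationCallbacks, foo);          // destroy + free
*/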
// STL-compatible allocator that forwards all allocations to VmaMalloc/VmaFree.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
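/*
Sketch (assumes VMA_USE_STL_VECTOR is 1, so VmaVector aliases std::vector):
the allocator above makes standard containers route every allocation through
the Vulkan-style callbacks.

    VmaStlAllocator<int> alloc(pAllocationCallbacks);
    std::vector<int, VmaStlAllocator<int>> v(alloc);
    v.push_back(42); // memory comes from pfnAllocation, not ::operator new
*/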
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }
    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }
    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }
    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }
    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }
    void push_front(const T& src) { insert(0, src); }
    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded
because the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}
template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(); // Explicit constructor call.
    return result;
}
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
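/*
Sketch of the free-list mechanics above (illustrative): each block stores an
array of Item unions. A free Item holds the index of the next free Item, so
FirstFreeIndex heads an intrusive singly-linked list threaded through the
array. Alloc() pops the head in O(1); Free() pushes the slot back. With
firstBlockCapacity = 4, a fresh block's NextFreeIndex chain is
0 -> 1 -> 2 -> 3 -> UINT32_MAX.
*/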
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;
    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };
    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };
    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
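/*
Note on the fallback VmaMap above (an observation, not original text): it is
an ordered map over a sorted VmaVector. insert() and find() both use
VmaBinaryFindFirstNotLess, i.e. binary search, so find() is O(log n) while
insert()/erase() pay O(n) for shifting elements -- a reasonable trade-off for
the small, rarely-mutated maps the allocator needs.
*/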
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator.

    void Ctor(uint32_t currentFrameIndex, bool userDataString)
    {
        m_Alignment = 1;
        m_Size = 0;
        m_pUserData = VMA_NULL;
        m_LastUseFrameIndex = currentFrameIndex;
        m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
        m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
        m_MapCount = 0;
        m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;

#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }
    void Dtor()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const;
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
      makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);
#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vkMapMemory()/vkUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        uint32_t m_MemoryTypeIndex;
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
// Represents a region of a VmaDeviceMemoryBlock, either assigned to some VmaAllocation or free.
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for VmaSuballocation, sorting by offset.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

// Parameters of planned allocation inside a VmaDeviceMemoryBlock.
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
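/*
Note on CalcCost (derived from the code above, illustrative numbers): when the
allocator considers stealing space from allocations that can become "lost",
each candidate request is scored as bytes-of-items-sacrificed plus a fixed
1 MiB (VMA_LOST_ALLOCATION_COST) penalty per lost allocation. E.g. making two
64 KiB allocations lost costs 2*65536 + 2*1048576 = 2228224, so requests that
displace fewer allocations win even if they free slightly less space.
*/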
/*
Data structure used for bookkeeping of allocations and unused ranges inside a
single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request has to be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    // For defragmentation.
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free. Merges it with adjacent free
    // suballocations if applicable. Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        // Suballocations in 2nd vector are created later than the ones in 1st,
        // but they all have smaller offsets.
        SECOND_VECTOR_RING_BUFFER,
        // Suballocations in 2nd vector are upper side of double stack.
        // They all have offsets higher than those in 1st vector.
        // Top of this stack means smaller offsets, but higher indices in this vector.
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
Buddy allocator:
- GetSize() is the original size of the allocated memory block.
- m_UsableSize is this size aligned down to a power of two;
  all allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them, reported as a separate,
  unused range not available for allocations.
Node at level 0 has size = m_UsableSize; each next level contains nodes with
size 2 times smaller than the current level.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE { TYPE_FREE, TYPE_ALLOCATION, TYPE_SPLIT, TYPE_COUNT } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct { Node* prev; Node* next; } free;
            struct { VmaAllocation alloc; } allocation;
            struct { Node* leftChild; } split;
        };
    };

    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;
    Node* m_Root;
    // Doubly linked list of free nodes, one list per level.
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation; doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
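    /*
    Worked example for the level math above (illustrative): with
    m_UsableSize = 256 MiB, LevelToNodeSize(0) = 256 MiB, level 1 nodes are
    128 MiB, level 2 are 64 MiB, and so on -- each level halves the node size.
    An allocation of 3 MiB is placed in the deepest level whose node size is
    still >= 3 MiB, i.e. a 4 MiB node, wasting up to 1 MiB to internal
    fragmentation: the classic buddy-allocator trade-off.
    */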
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE; node->free.prev, next can be undefined.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE; node->free.prev, next stay untouched.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if it doesn't belong to a custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory. Also protects m_MapCount and m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool isCustomPool,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaPool GetParentPool() const { return m_hParentPool; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsEmpty() const { return m_Blocks.empty(); }
    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    VkResult Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;
private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_IsCustomPool;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    bool m_HasEmptyBlock;
    VMA_RW_MUTEX m_Mutex;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);
};
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif

private:
    uint32_t m_Id;
};
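
/*
Performs defragmentation:

- Updates `pBlockVector->m_pMetadata`.
- Updates allocations by calling ChangeBlockAllocation() or ChangeOffset().
- Does not move the actual data, only returns the requested moves as `moves`.
*/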
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
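
// Generic algorithm: considers each registered allocation individually and
// tries to move it to a "better" destination block. Blocks containing
// non-movable allocations and fuller blocks are preferred as destinations
// (see BlockInfoCompareMoveDestination below).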
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };
    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };
    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;
    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
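
// Fast algorithm: valid only when it is given every allocation in the block
// vector (AddAll). Instead of per-allocation bookkeeping it rewrites the
// suballocation metadata wholesale (PreprocessMetadata/PostprocessMetadata)
// and keeps a small FreeSpaceDatabase of recyclable gaps.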
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }
        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            // Spaces below the registration threshold are not worth tracking.
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find an empty slot, or the slot holding the smallest space still smaller than this one.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }
        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            // Choose the space that leaves the most room after the aligned allocation.
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                // Remaining space is still large enough: leave the entry, shrunk by the consumed range.
                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }
                return true;
            }
            return false;
        }
    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
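
// Per-block-vector state of one defragmentation run: flags for each block,
// the chosen algorithm instance, and the allocations registered before
// Begin() is called.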
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    /*
    Returns:
    - `VK_SUCCESS` if succeeded and the object can be destroyed immediately.
    - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
    - Negative value if error occurred and the object can be destroyed immediately.
    */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats);

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;
    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
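
// VmaRecorder writes every allocator call as a line in a CSV file, so that a
// recorded session can be replayed later (see VMA_RECORDING_ENABLED and the
// accompanying VmaReplay tool).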
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }
    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
/*
Thread-safe wrapper over VmaPoolAllocator free list, for allocation of
VmaAllocation_T objects.
*/
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    VmaAllocation Allocate();
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
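
// Budget bookkeeping for VK_EXT_memory_budget: m_BlockBytes/m_AllocationBytes
// are updated atomically on every (de)allocation, while the usage and budget
// values reported by Vulkan are re-fetched periodically under m_BudgetMutex.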
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }
};
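
// Main allocator object. Owns the default block vectors (one per memory
// type), the lists of dedicated allocations, custom pools, budget data, and
// the imported Vulkan function pointers.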
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    VkDevice m_hDevice; // Referenced throughout the implementation below (e.g. in vkMapMemory calls).
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so only applied to memory blocks.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }
    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif
    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif
    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();
private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif
    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void FreeDedicatedMemory(VmaAllocation allocation);

    /*
    Calculates and returns bit mask of memory types that can support defragmentation
    on GPU as they support creation of required buffer for copy operations.
    */
    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};
void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    // Render decimal digits backwards into a local buffer, then append.
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do { *--p = '0' + (num % 10); } while(num /= 10);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do { *--p = '0' + (num % 10); } while(num /= 10);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
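
/*
VmaJsonWriter below emits the JSON returned by vmaBuildStatsString(). A
minimal usage sketch (illustrative only, not part of the library):

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Total"); // Inside an object, keys and values alternate.
        json.WriteNumber(42u);
        json.EndObject();
    }
    // sb now holds {"Total": 42}, spread over indented lines.
*/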
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";
VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);
    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);
    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}
void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    // Escape characters as required by JSON.
    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\') { m_SB.Add("\\\\"); }
        else if(ch == '"') { m_SB.Add("\\\""); }
        else if(ch >= 32) { m_SB.Add(ch); }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}
void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}
void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            // Inside an object, even positions are keys: they must be strings.
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();
        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            const char* const newStrSrc = (char*)pUserData;
            const size_t newStrLen = strlen(newStrSrc);
            char* const newStrDst = vma_new_array(hAllocator, char, newStrLen + 1);
            memcpy(newStrDst, newStrSrc, newStrLen + 1);
            m_pUserData = newStrDst;
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}
void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}
VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

uint32_t VmaAllocation_T::GetMemoryTypeIndex() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_MemoryTypeIndex;
    default:
        VMA_ASSERT(0);
        return UINT32_MAX;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}
bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    // Retry loop: another thread may bump the last-use frame index concurrently,
    // in which case the compare-exchange fails and the check is repeated.
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            // Still potentially in use: cannot be made lost.
            return false;
        }
        else if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
        {
            // Setting the atomic to VMA_FRAME_INDEX_LOST is enough to mark the allocation as lost.
            return true;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    if(m_pUserData != VMA_NULL)
    {
        char* const oldStr = (char*)m_pUserData;
        const size_t oldStrLen = strlen(oldStr);
        vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
        m_pUserData = VMA_NULL;
    }
}
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}
VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}
void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
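
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata
// Base class for the per-block suballocation bookkeeping strategies.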
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
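
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic
// Free-list strategy: suballocations live in a list ordered by offset, and
// iterators to free items are kept in m_FreeSuballocationsBySize, sorted by
// size, so best-fit lookups can binary-search (VmaBinaryFindFirstNotLess).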
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid: they should have been merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize must match the expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from the biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force search over all suballocations, tracking the cheapest candidate.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after the current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before the current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: success. pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item->size,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
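
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear
// Linear strategy (VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT): two suballocation
// vectors emulate a stack, double stack, or ring buffer depending on
// m_2ndVectorMode.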
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}
VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    /*
    Gaps left by freed allocations inside the vectors are not counted, because they
    are not suitable for reuse in a linear allocator. Only space available for new
    allocations is considered.
    */
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space: after end of 1st, or before beginning of 1st (would make it a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space: only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space: only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        // Found non-null allocation.
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            // 1. Process free space before this allocation.
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // 2. Process this allocation.
            ++inoutStats.allocationCount;

            // 3. Prepare for next iteration.
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        // We are at the end.
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            // End of loop.
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            // Found non-null allocation.
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                ++inoutStats.allocationCount;

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            // We are at the end.
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                // End of loop.
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations and unused ranges.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }

            ++alloc1stCount;
            usedBytes += suballoc.size;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < size)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }

                ++alloc2ndCount;
                usedBytes += suballoc.size;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: write allocations and unused ranges to JSON.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                ++nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
        {
            ++nextAlloc1stIndex;
        }

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
            {
                --nextAlloc2ndIndex;
            }

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
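/*
Upper-address requests serve VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT: the block is
treated as a double stack and the new allocation is placed below the current top of
the 2nd vector, growing downward from the end of the block. Lower-address requests
extend the 1st vector, wrapping around to the 2nd vector (ring buffer) if needed.
*/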
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or end of block if 2nd.empty().
    if(allocSize > size)
    {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
        {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
        {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Make bigger alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
            {
                // Already on previous page.
                break;
            }
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
            }
        }

        // All tests passed: Success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on previous page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // No problem.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                    {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            // Not checking the actual granularity conflict here; being conservative.
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                            {
                                return false;
                            }
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation, even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                        {
                            return false;
                        }
                    }
                    else
                    {
                        // Already on next page.
                        break;
                    }
                }
            }

            // All tests passed: Success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            return true;
        }
    }

    return false;
}
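/*
"Lost" allocation support: when a request was created with canMakeOtherLost, the
caller must invoke MakeRequestedAllocationsLost() before actually allocating. It
walks the colliding items starting at the beginning of the 1st vector (wrapping to
the 2nd in ring-buffer mode), asks each allocation to MakeLost(), turns it into a
null item, and finally lets CleanupAfterFree() compact the vectors.
*/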
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: suballocations keeps pointing at 1st (SECOND_VECTOR_EMPTY).
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else
            {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
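/*
CheckCorruption() relies on VMA_DEBUG_DETECT_CORRUPTION: every allocation is
surrounded by VMA_DEBUG_MARGIN bytes filled with a magic value (see
WriteMagicValueAroundAllocation() further below), and VmaValidateMagicValue()
verifies that these guard bytes are still intact.
*/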
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}
void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: mark it as next empty at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
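/*
Worked example for the heuristic above: with suballocCount = 100, the condition
nullItemCount * 2 >= (suballocCount - nullItemCount) * 3 first holds at
nullItemCount = 60 (120 >= 120). In other words, compaction of the 1st vector kicks
in once at least 60% of its items are null, and only when the vector holds more
than 32 items.
*/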
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
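////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy

/*
VmaBlockMetadata_Buddy implements the classic buddy allocator: the usable part of
the block (its size rounded down to a power of 2) is the root of a binary tree.
Each node is FREE, an ALLOCATION, or SPLIT into two buddies of half the size.
m_FreeList[level] is a doubly-linked list of free nodes per level, so a request is
served from the smallest level whose node size still fits it. Custom pools select
this algorithm with VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT.
*/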
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
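/*
Example: Init(1000) presumably gives m_UsableSize = VmaPrevPow2(1000) = 512, so
level 0 nodes are 512 bytes, level 1 nodes 256, level 2 nodes 128, and so on, as
long as the level count stays below MAX_LEVELS and the node size stays at least
MIN_NODE_SIZE. The remaining 1000 - 512 = 488 bytes are reported as unusable space
(GetUnusableSize()) in the statistics functions below.
*/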
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
        {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;

    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;

    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: round up whenever the allocation
    // might be an OPTIMAL image or of unknown type.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in buddy allocator at the moment.
    Support might be added in the future.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in buddy allocator at the moment.
    Support might be added in the future.
    */
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel)
    {
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
        // currNode is the left child of the node just split, so it also fulfills
        // the alignment requirement.
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // A portable alternative to bit-scan intrinsics.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
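/*
Example: in a block with m_UsableSize = 512, AllocSizeToLevel(100) walks
512 -> 256 -> 128 and stops because 100 > 64, returning level 2 (node size 128),
assuming m_LevelCount is large enough; the loop also stops at m_LevelCount - 1.
*/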
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes to parents as many times as possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
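////////////////////////////////////////////////////////////////////////////////
// class VmaDeviceMemoryBlock

/*
VmaDeviceMemoryBlock represents a single VkDeviceMemory allocation together with
the metadata object (generic, linear, or buddy - chosen by `algorithm` in Init())
that manages suballocations inside it. It also owns the persistent-map reference
count and serializes vkMapMemory/vkBind* calls on this memory with m_Mutex.
*/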
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        true, // isCustomPool
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0)
{
}

VmaPool_T::~VmaPool_T()
{
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool isCustomPool,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_IsCustomPool(isCustomPool),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
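
// Corruption detection works by writing magic values into the VMA_DEBUG_MARGIN
// around every allocation and validating them on free. That is only possible
// when the CPU can read and write the memory directly, hence the requirement
// below for HOST_VISIBLE and HOST_COHERENT memory types.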
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}

static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
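
// Allocate() is all-or-nothing for multi-page requests: it allocates
// allocationCount pages under one mutex lock and, on the first failure, frees
// every page allocated so far and clears the output array.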
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
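
// AllocatePage() below tries, in order: (1) existing blocks (only the last
// block for the linear algorithm; otherwise forward for best-fit or backward
// for worst/first-fit), (2) creating a new block, starting at the preferred
// size and halving it up to NEW_BLOCK_SIZE_SHIFT_MAX times on failure, and
// (3) if allowed, making other allocations lost to reclaim space.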
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        freeMemory >= size;
    const uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linear algorithm is used, "making other allocations lost" is available
    // only when used as ring buffer, which requires maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(pCurrBlock, currentFrameIndex, size, alignment,
                        allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(pCurrBlock, currentFrameIndex, size, alignment,
                        allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }
        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = newBlockSize <= freeMemory ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = newBlockSize <= freeMemory ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(pBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
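
    // Fallback path below: "lost allocations". Allocations created with
    // VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT may be evicted once unused for
    // m_FrameInUseCount frames. The loop repeatedly picks the cheapest
    // candidate request (fewest bytes to sacrifice) and retries up to
    // VMA_ALLOCATION_TRY_COUNT times, since concurrent threads can invalidate
    // a chosen request between cost evaluation and commit.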
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex, m_FrameInUseCount, &bestRequest))
                {
                    // Allocate from this pBlock. We no longer have an empty Allocation.
                    if(pBestRequestBlock->m_pMetadata->IsEmpty())
                    {
                        m_HasEmptyBlock = false;
                    }
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
                    (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.m_AllocationBytes[m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex)] += size;
                    ++m_hAllocator->m_Budget.m_OperationsSinceBudgetFetch;
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations in this block must have been touched meanwhile - next try.
            }
            else
            {
                // Could not find a place for this allocation in any block - no more tries.
                break;
            }
        }
        /* Maximum number of tries exceeded - a very unlikely situation. It might be caused by
        two threads frequently touching each other's allocations. */
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already has an empty block. We don't want to have two, so delete this one.
            if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // We now have our first empty block.
            else
            {
                m_HasEmptyBlock = true;
            }
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
                m_HasEmptyBlock = false;
            }
        }

        IncrementallySortBlocks();
    }

    // Destruction of the empty block. Deferred until this point, outside of the
    // mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty allocation");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
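
// IncrementallySortBlocks() performs a single step of bubble sort per call, so
// m_Blocks may not be fully sorted afterwards. Free() is called often and each
// call perturbs the ordering only slightly, so the vector converges toward
// ascending sum of free size over time without paying for a full sort.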
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex, m_FrameInUseCount,
        m_BufferImageGranularity,
        size, alignment, isUpperAddress, suballocType,
        false, // canMakeOtherLost
        strategy, &currRequest))
    {
        // Allocate from pCurrBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        // We no longer have an empty block.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = false;
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
        (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.m_AllocationBytes[m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex)] += size;
        ++m_hAllocator->m_Budget.m_OperationsSinceBudgetFetch;
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
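
// ApplyDefragmentationMovesCpu() below executes planned moves with memmove on
// mapped pointers: it marks the blocks each move touches, maps them
// (remembering which ones were mapped only for defragmentation), copies, and
// for non-coherent memory wraps each copy in vkInvalidateMappedMemoryRanges /
// vkFlushMappedMemoryRanges over ranges aligned to nonCoherentAtomSize.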
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
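
// ApplyDefragmentationMovesGpu() is the device-side variant: it creates one
// temporary VkBuffer spanning each touched block, binds it at offset 0, and
// records vkCmdCopyBuffer regions into the user-provided command buffer. The
// context result becomes VK_NOT_READY until the user submits those commands
// and ends defragmentation.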
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Buffers are kept in the defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(m_IsCustomPool)
    {
        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
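
// Defragment() chooses between the CPU and GPU code paths per memory type:
// CPU needs HOST_VISIBLE memory and nonzero CPU limits; GPU needs a command
// buffer, nonzero GPU limits and a memory type enabled in
// GetGpuDefragmentationMemoryTypeBits(). When both are possible, DEVICE_LOCAL
// memory (or an integrated GPU) tips the choice toward the GPU path.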
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            m_Mutex.LockWrite();
            pCtx->mutexLocked = true;
        }

        pCtx->Begin(overlappingMoveSupported);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, moves);
            }
        }
    }
}
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
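
// Generic defragmentation algorithm: blocks are sorted from most "destination"
// to most "source", allocations are processed from the back, and each one is
// re-placed into the earliest block/offset where it fits, as decided by
// MoveMakesSense(). The work is split into rounds limited by byte and
// allocation budgets.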
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT, // strategy
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move;
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterion: move destination first.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
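
// Fast defragmentation algorithm: a single compacting sweep, valid only when
// every allocation in the block vector is movable and there is no debug margin
// or buffer/image-granularity conflict. It rewrites block metadata directly
// and keeps a FreeSpaceDatabase of holes left behind, so it avoids the
// per-move allocation requests of the generic algorithm.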
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}

VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by copying memory.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by copying memory.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;
                        VmaDefragmentationMove move = {
                            srcOrigBlockIndex, dstOrigBlockIndex,
                            srcAllocOffset, dstAllocOffset,
                            srcAllocSize };
                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, dstOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
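
// InsertSuballoc() keeps the destination metadata's suballocation list sorted
// by offset, finding the insertion point with a simple linear scan.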
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
    const VmaSuballocation& suballoc)
{
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}

void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /*
    Here is the choice of defragmentation algorithm.
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are movable.
    - There is no possibility of buffer/image granularity conflict.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
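
// VmaDefragmentationContext_T fans work out to one
// VmaBlockVectorDefragmentationContext per touched block vector: a fixed array
// indexed by memory type for the default pools, plus a growable vector for
// custom pools.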
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocations cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
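
/*
The recorder below writes a plain text, CSV-like file. After a two-line header
(a file magic string and the format version "1,6"), every intercepted call
becomes one line of the form:

    threadId,time,frameIndex,functionName[,args...]

For example (values illustrative only):

    4756,0.017,0,vmaCreateAllocator
    4756,0.129,0,vmaDestroyPool,000001D1DB0BB300
*/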
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,6");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
14049 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
14052 CallParams callParams;
14053 GetBasicParams(callParams);
14055 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14056 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
14061 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
14064 CallParams callParams;
14065 GetBasicParams(callParams);
14067 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14068 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
14073 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
14074 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
14076 CallParams callParams;
14077 GetBasicParams(callParams);
14079 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14080 fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
14087 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
14088 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
14090 CallParams callParams;
14091 GetBasicParams(callParams);
14093 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14094 fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
14101 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
14102 const VkBufferCreateInfo& bufCreateInfo,
14106 CallParams callParams;
14107 GetBasicParams(callParams);
14109 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14110 UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
14111 fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14112 bufCreateInfo.flags,
14113 bufCreateInfo.size,
14114 bufCreateInfo.usage,
14115 bufCreateInfo.sharingMode,
14116 allocCreateInfo.flags,
14117 allocCreateInfo.usage,
14121 allocCreateInfo.pool,
14123 userDataStr.GetString());
14127 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
14128 const VkImageCreateInfo& imageCreateInfo,
14132 CallParams callParams;
14133 GetBasicParams(callParams);
14135 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14136 UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
14137 fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14138 imageCreateInfo.flags,
14139 imageCreateInfo.imageType,
14140 imageCreateInfo.format,
14141 imageCreateInfo.extent.width,
14142 imageCreateInfo.extent.height,
14143 imageCreateInfo.extent.depth,
14144 imageCreateInfo.mipLevels,
14145 imageCreateInfo.arrayLayers,
14146 imageCreateInfo.samples,
14147 imageCreateInfo.tiling,
14148 imageCreateInfo.usage,
14149 imageCreateInfo.sharingMode,
14150 imageCreateInfo.initialLayout,
14151 allocCreateInfo.flags,
14152 allocCreateInfo.usage,
14156 allocCreateInfo.pool,
14158 userDataStr.GetString());
14162 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
14165 CallParams callParams;
14166 GetBasicParams(callParams);
14168 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14169 fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
14174 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
14177 CallParams callParams;
14178 GetBasicParams(callParams);
14180 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14181 fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
14186 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
14189 CallParams callParams;
14190 GetBasicParams(callParams);
14192 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14193 fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
14198 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
14201 CallParams callParams;
14202 GetBasicParams(callParams);
14204 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14205 fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
14210 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14213 CallParams callParams;
14214 GetBasicParams(callParams);
14216 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14217 fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
14222 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14226 CallParams callParams;
14227 GetBasicParams(callParams);
14229 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14230 fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14233 fprintf(m_File, ",");
14235 fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
14245 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14248 CallParams callParams;
14249 GetBasicParams(callParams);
14251 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14252 fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
14259 if(pUserData != VMA_NULL)
14263 m_Str = (const char*)pUserData;
14267 sprintf_s(m_PtrStr, "%p", pUserData);
14277 void VmaRecorder::WriteConfiguration(
14278 const VkPhysicalDeviceProperties& devProps,
14279 const VkPhysicalDeviceMemoryProperties& memProps,
14280 bool dedicatedAllocationExtensionEnabled,
14281 bool bindMemory2ExtensionEnabled)
14283 fprintf(m_File, "Config,Begin\n");
14285 fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14286 fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14287 fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14288 fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14289 fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14290 fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);
14292 fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14293 fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14294 fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
14296 fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14297 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14299 fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14300 fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
14302 fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14303 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14305 fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14306 fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14309 fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
14310 fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
14312 fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14313 fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14314 fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14315 fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14316 fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14317 fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14318 fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14319 fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14320 fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14322 fprintf(m_File, "Config,End\n");
14325 void VmaRecorder::GetBasicParams(CallParams& outParams)
14327 outParams.threadId = GetCurrentThreadId();
14329 LARGE_INTEGER counter;
14330 QueryPerformanceCounter(&counter);
14331 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
14334 void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
14338 fprintf(m_File, "%p", pItems[0]);
14339 for(uint64_t i = 1; i < count; ++i)
14341 fprintf(m_File, " %p", pItems[i]);
14346 void VmaRecorder::Flush()
14354 #endif // #if VMA_RECORDING_ENABLED
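/*
Usage sketch for the recording facility above (illustrative, not a normative
sample; the file path is hypothetical). Recording is turned on by filling
VmaAllocatorCreateInfo::pRecordSettings before creating the allocator and
requires VMA_RECORDING_ENABLED defined to 1:

    VmaRecordSettings recordSettings = {};
    recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT; // flush after each CSV row
    recordSettings.pFilePath = "vma_capture.csv";           // hypothetical path

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.pRecordSettings = &recordSettings;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);
*/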
14359 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
14360 m_Allocator(pAllocationCallbacks, 1024)
14366 VmaMutexLock mutexLock(m_Mutex);
14367 return m_Allocator.Alloc();
14370 void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
14372 VmaMutexLock mutexLock(m_Mutex);
14373 m_Allocator.Free(hAlloc);
14384 m_hDevice(pCreateInfo->device),
14385 m_hInstance(pCreateInfo->instance),
14386 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14387 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14388 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14389 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14390 m_HeapSizeLimitMask(0),
14391 m_PreferredLargeHeapBlockSize(0),
14392 m_PhysicalDevice(pCreateInfo->physicalDevice),
14393 m_CurrentFrameIndex(0),
14394 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14395 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
14398 ,m_pRecorder(VMA_NULL)
14401 if(VMA_DEBUG_DETECT_CORRUPTION)
14404 VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
14409 #if !(VMA_DEDICATED_ALLOCATION)
14412 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14415 #if !(VMA_BIND_MEMORY2)
14418 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
14421 #if !(VMA_MEMORY_BUDGET)
14424 VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
14428 memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
14429 memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
14430 memset(&m_MemProps, 0, sizeof(m_MemProps));
14432 memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
14433 memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
14434 memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));
14444 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14445 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14447 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14448 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14449 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14450 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
14457 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14459 const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
14460 if(limit != VK_WHOLE_SIZE)
14462 m_HeapSizeLimitMask |= 1u << heapIndex;
14463 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14465 m_MemProps.memoryHeaps[heapIndex].size = limit;
14471 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14473 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14475 m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
14479 preferredBlockSize,
14482 GetBufferImageGranularity(),
14489 m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14496 VkResult res = VK_SUCCESS;
14501 #if VMA_RECORDING_ENABLED
14502 m_pRecorder = vma_new(this, VmaRecorder)();
14504 if(res != VK_SUCCESS)
14508 m_pRecorder->WriteConfiguration(
14509 m_PhysicalDeviceProperties,
14511 m_UseKhrDedicatedAllocation,
14512 m_UseKhrBindMemory2);
14513 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14515 VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14516 return VK_ERROR_FEATURE_NOT_PRESENT;
14520 #if VMA_MEMORY_BUDGET
14521 if(m_UseExtMemoryBudget)
14523 UpdateVulkanBudget();
14525 #endif // #if VMA_MEMORY_BUDGET
14530 VmaAllocator_T::~VmaAllocator_T()
14532 #if VMA_RECORDING_ENABLED
14533 if(m_pRecorder != VMA_NULL)
14535 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14536 vma_delete(this, m_pRecorder);
14540 VMA_ASSERT(m_Pools.empty());
14542 for(size_t i = GetMemoryTypeCount(); i--; )
14544 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14546 VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
14549 vma_delete(this, m_pDedicatedAllocations[i]);
14550 vma_delete(this, m_pBlockVectors[i]);
14554 void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
14556 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
14557 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14558 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14559 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14560 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14561 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14562 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14563 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14564 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14565 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14566 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14567 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14568 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14569 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14570 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14571 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14572 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14573 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
14574 #if VMA_DEDICATED_ALLOCATION
14575 if(m_UseKhrDedicatedAllocation)
14577 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14578 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
14579 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14580 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
14582 #endif // #if VMA_DEDICATED_ALLOCATION
14583 #if VMA_BIND_MEMORY2
14584 if(m_UseKhrBindMemory2)
14586 m_VulkanFunctions.vkBindBufferMemory2KHR =
14587 (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
14588 m_VulkanFunctions.vkBindImageMemory2KHR =
14589 (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
14591 #endif // #if VMA_BIND_MEMORY2
14592 #if VMA_MEMORY_BUDGET
14593 if(m_UseExtMemoryBudget)
14595 VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
14596 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
14597 (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
14599 #endif // #if VMA_MEMORY_BUDGET
14600 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
14602 #define VMA_COPY_IF_NOT_NULL(funcName) \
14603 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
14605 if(pVulkanFunctions != VMA_NULL)
14607 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14608 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14609 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14610 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14611 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14612 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14613 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14614 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14615 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14616 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14617 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14618 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14619 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14620 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14621 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14622 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14623 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14624 #if VMA_DEDICATED_ALLOCATION
14625 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14626 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14628 #if VMA_BIND_MEMORY2
14629 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
14630 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
14632 #if VMA_MEMORY_BUDGET
14633 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
14637 #undef VMA_COPY_IF_NOT_NULL
14641 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14642 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14643 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14644 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14645 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14646 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14647 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14648 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14649 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14650 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14651 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14652 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14653 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14654 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14655 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14656 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14657 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14658 #if VMA_DEDICATED_ALLOCATION
14659 if(m_UseKhrDedicatedAllocation)
14661 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14662 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14665 #if VMA_BIND_MEMORY2
14666 if(m_UseKhrBindMemory2)
14668 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14669 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
14672 #if VMA_MEMORY_BUDGET
14673 if(m_UseExtMemoryBudget)
14675 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
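/*
Sketch of supplying the function pointers imported and validated above by hand
(illustrative; relevant when VMA_STATIC_VULKAN_FUNCTIONS is 0 or when using a
loader such as volk). Only a subset of the members is shown:

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = vkFreeMemory;
    // ...remaining members of VmaVulkanFunctions...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/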
14680 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14682 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14683 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14684 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14685 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
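/*
Worked example for CalcPreferredBlockSize (numbers are illustrative and assume
the default macro values): with VMA_SMALL_HEAP_MAX_SIZE = 1 GiB, a 256 MiB
heap counts as "small", so the preferred block size becomes heapSize / 8 =
32 MiB, rounded up to a multiple of 32 bytes. A larger heap uses
m_PreferredLargeHeapBlockSize, which defaults to
VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB) unless overridden via
VmaAllocatorCreateInfo::preferredLargeHeapBlockSize.
*/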
14688 VkResult VmaAllocator_T::AllocateMemoryOfType(
14690 VkDeviceSize alignment,
14691 bool dedicatedAllocation,
14692 VkBuffer dedicatedBuffer,
14693 VkImage dedicatedImage,
14695 uint32_t memTypeIndex,
14696 VmaSuballocationType suballocType,
14697 size_t allocationCount,
14700 VMA_ASSERT(pAllocations != VMA_NULL);
14701 VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
14707 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14712 VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
14713 VMA_ASSERT(blockVector);
14715 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14716 bool preferDedicatedMemory =
14717 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14718 dedicatedAllocation ||
14720 size > preferredBlockSize / 2;
14722 if(preferDedicatedMemory &&
14724 finalCreateInfo.pool == VK_NULL_HANDLE)
14733 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14737 return AllocateDedicatedMemory(
14753 VkResult res = blockVector->Allocate(
14754 m_CurrentFrameIndex.load(),
14761 if(res == VK_SUCCESS)
14769 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14773 res = AllocateDedicatedMemory(
14780 finalCreateInfo.pUserData,
14785 if(res == VK_SUCCESS)
14788 VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
14794 VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
14801 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14803 VmaSuballocationType suballocType,
14804 uint32_t memTypeIndex,
14807 bool isUserDataString,
14809 VkBuffer dedicatedBuffer,
14810 VkImage dedicatedImage,
14811 size_t allocationCount,
14814 VMA_ASSERT(allocationCount > 0 && pAllocations);
14818 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14820 GetBudget(&heapBudget, heapIndex, 1);
14821 if(heapBudget.usage + size * allocationCount > heapBudget.budget)
14823 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14827 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14828 allocInfo.memoryTypeIndex = memTypeIndex;
14829 allocInfo.allocationSize = size;
14831 #if VMA_DEDICATED_ALLOCATION
14832 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14833 if(m_UseKhrDedicatedAllocation)
14835 if(dedicatedBuffer != VK_NULL_HANDLE)
14837 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14838 dedicatedAllocInfo.buffer = dedicatedBuffer;
14839 allocInfo.pNext = &dedicatedAllocInfo;
14841 else if(dedicatedImage != VK_NULL_HANDLE)
14843 dedicatedAllocInfo.image = dedicatedImage;
14844 allocInfo.pNext = &dedicatedAllocInfo;
14847 #endif // #if VMA_DEDICATED_ALLOCATION
14850 VkResult res = VK_SUCCESS;
14851 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14853 res = AllocateDedicatedMemoryPage(
14861 pAllocations + allocIndex);
14862 if(res != VK_SUCCESS)
14868 if(res == VK_SUCCESS)
14872 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14873 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14874 VMA_ASSERT(pDedicatedAllocations);
14875 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14877 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14881 VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
14886 while(allocIndex--)
14889 VkDeviceMemory hMemory = currAlloc->GetMemory();
14901 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14902 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14903 m_Budget.m_AllocationBytes[heapIndex] -= currAlloc->GetSize();
14904 ++m_Budget.m_OperationsSinceBudgetFetch;
14905 currAlloc->SetUserData(this, VMA_NULL);
14907 m_AllocationObjectAllocator.Free(currAlloc);
14910 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
14916 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14918 VmaSuballocationType suballocType,
14919 uint32_t memTypeIndex,
14920 const VkMemoryAllocateInfo& allocInfo,
14922 bool isUserDataString,
14926 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14927 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14930 VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
14934 void* pMappedData = VMA_NULL;
14937 res = (*m_VulkanFunctions.vkMapMemory)(
14946 VMA_DEBUG_LOG(" vkMapMemory FAILED");
14947 FreeVulkanMemory(memTypeIndex, size, hMemory);
14952 *pAllocation = m_AllocationObjectAllocator.Allocate();
14953 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14954 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14955 (*pAllocation)->SetUserData(this, pUserData);
14956 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14957 m_Budget.m_AllocationBytes[heapIndex] += size;
14958 ++m_Budget.m_OperationsSinceBudgetFetch;
14959 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14961 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
14967 void VmaAllocator_T::GetBufferMemoryRequirements(
14969 VkMemoryRequirements& memReq,
14970 bool& requiresDedicatedAllocation,
14971 bool& prefersDedicatedAllocation) const
14973 #if VMA_DEDICATED_ALLOCATION
14974 if(m_UseKhrDedicatedAllocation)
14976 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14977 memReqInfo.buffer = hBuffer;
14979 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14981 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14982 memReq2.pNext = &memDedicatedReq;
14984 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14986 memReq = memReq2.memoryRequirements;
14987 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14988 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14991 #endif // #if VMA_DEDICATED_ALLOCATION
14993 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14994 requiresDedicatedAllocation = false;
14995 prefersDedicatedAllocation = false;
14999 void VmaAllocator_T::GetImageMemoryRequirements(
15001 VkMemoryRequirements& memReq,
15002 bool& requiresDedicatedAllocation,
15003 bool& prefersDedicatedAllocation) const
15005 #if VMA_DEDICATED_ALLOCATION
15006 if(m_UseKhrDedicatedAllocation)
15008 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
15009 memReqInfo.image = hImage;
15011 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
15013 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
15014 memReq2.pNext = &memDedicatedReq;
15016 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
15018 memReq = memReq2.memoryRequirements;
15019 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
15020 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
15023 #endif // #if VMA_DEDICATED_ALLOCATION
15025 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
15026 requiresDedicatedAllocation = false;
15027 prefersDedicatedAllocation = false;
15031 VkResult VmaAllocator_T::AllocateMemory(
15032 const VkMemoryRequirements& vkMemReq,
15033 bool requiresDedicatedAllocation,
15034 bool prefersDedicatedAllocation,
15035 VkBuffer dedicatedBuffer,
15036 VkImage dedicatedImage,
15038 VmaSuballocationType suballocType,
15039 size_t allocationCount,
15042 memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
15044 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
15046 if(vkMemReq.size == 0)
15048 return VK_ERROR_VALIDATION_FAILED_EXT;
15053 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
15054 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15059 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
15060 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15062 if(requiresDedicatedAllocation)
15066 VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
15067 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15069 if(createInfo.pool != VK_NULL_HANDLE)
15071 VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
15072 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15075 if((createInfo.pool != VK_NULL_HANDLE) &&
15078 VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
15079 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15082 if(createInfo.pool != VK_NULL_HANDLE)
15084 const VkDeviceSize alignmentForPool = VMA_MAX(
15085 vkMemReq.alignment,
15086 GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
15091 (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15096 return createInfo.pool->m_BlockVector.Allocate(
15097 m_CurrentFrameIndex.load(),
15108 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
15109 uint32_t memTypeIndex = UINT32_MAX;
15111 if(res == VK_SUCCESS)
15113 VkDeviceSize alignmentForMemType = VMA_MAX(
15114 vkMemReq.alignment,
15115 GetMemoryTypeMinAlignment(memTypeIndex));
15117 res = AllocateMemoryOfType(
15119 alignmentForMemType,
15120 requiresDedicatedAllocation || prefersDedicatedAllocation,
15129 if(res == VK_SUCCESS)
15139 memoryTypeBits &= ~(1u << memTypeIndex);
15142 if(res == VK_SUCCESS)
15144 alignmentForMemType = VMA_MAX(
15145 vkMemReq.alignment,
15146 GetMemoryTypeMinAlignment(memTypeIndex));
15148 res = AllocateMemoryOfType(
15150 alignmentForMemType,
15151 requiresDedicatedAllocation || prefersDedicatedAllocation,
15160 if(res == VK_SUCCESS)
15170 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15181 void VmaAllocator_T::FreeMemory(
15182 size_t allocationCount,
15185 VMA_ASSERT(pAllocations);
15187 for(size_t allocIndex = allocationCount; allocIndex--; )
15191 if(allocation != VK_NULL_HANDLE)
15193 if(TouchAllocation(allocation))
15195 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
15197 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
15200 switch(allocation->GetType())
15202 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15204 VmaBlockVector* pBlockVector = VMA_NULL;
15205 VmaPool hPool = allocation->GetBlock()->GetParentPool();
15206 if(hPool != VK_NULL_HANDLE)
15208 pBlockVector = &hPool->m_BlockVector;
15212 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15213 pBlockVector = m_pBlockVectors[memTypeIndex];
15215 pBlockVector->Free(allocation);
15218 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15219 FreeDedicatedMemory(allocation);
15226 m_Budget.m_AllocationBytes[MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex())] -= allocation->GetSize();
15227 ++m_Budget.m_OperationsSinceBudgetFetch;
15228 allocation->SetUserData(this, VMA_NULL);
15229 allocation->Dtor();
15230 m_AllocationObjectAllocator.Free(allocation);
15235 VkResult VmaAllocator_T::ResizeAllocation(
15237 VkDeviceSize newSize)
15240 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
15242 return VK_ERROR_VALIDATION_FAILED_EXT;
15244 if(newSize == alloc->GetSize())
15248 return VK_ERROR_OUT_OF_POOL_MEMORY;
15251 void VmaAllocator_T::CalculateStats(VmaStats* pStats)
15254 InitStatInfo(pStats->total);
15255 for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
15257 for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
15261 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15263 VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
15264 VMA_ASSERT(pBlockVector);
15265 pBlockVector->AddStats(pStats);
15270 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15271 for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15273 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
15278 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15280 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15281 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15282 AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15283 VMA_ASSERT(pDedicatedAllocVector);
15284 for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15287 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
15288 VmaAddStatInfo(pStats->total, allocationStatInfo);
15289 VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
15290 VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
15295 VmaPostprocessCalcStatInfo(pStats->total);
15296 for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
15297 VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
15298 for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
15299 VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
15302 void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
15304 #if VMA_MEMORY_BUDGET
15305 if(m_UseExtMemoryBudget)
15307 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
15309 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
15310 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
15312 const uint32_t heapIndex = firstHeap + i;
15314 outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
15317 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
15319 outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
15320 outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
15324 outBudget->usage = 0;
15328 outBudget->budget = VMA_MIN(
15329 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
15334 UpdateVulkanBudget();
15335 GetBudget(outBudget, firstHeap, heapCount);
15341 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
15343 const uint32_t heapIndex = firstHeap + i;
15345 outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
15349 outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
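/*
Usage sketch for the budget query implemented above (illustrative). With
VK_EXT_memory_budget enabled the numbers come from the driver; otherwise the
fallback above estimates the budget as 80% of the heap size:

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budgets);
    // Comparing budgets[heapIndex].usage against budgets[heapIndex].budget
    // can drive decisions such as reducing streaming quality before
    // overcommitting device memory.
*/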
15354 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
15356 VkResult VmaAllocator_T::DefragmentationBegin(
15366 *pContext = vma_new(this, VmaDefragmentationContext_T)(
15367 this, m_CurrentFrameIndex.load(), info.flags, pStats);
15370 (*pContext)->AddAllocations(
15373 VkResult res = (*pContext)->Defragment(
15378 if(res != VK_NOT_READY)
15380 vma_delete(this, *pContext);
15381 *pContext = VMA_NULL;
15387 VkResult VmaAllocator_T::DefragmentationEnd(
15390 vma_delete(this, context);
15396 if(hAllocation->CanBecomeLost())
15402 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15403 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15406 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15410 pAllocationInfo->offset = 0;
15411 pAllocationInfo->size = hAllocation->GetSize();
15413 pAllocationInfo->pUserData = hAllocation->GetUserData();
15416 else if(localLastUseFrameIndex == localCurrFrameIndex)
15418 pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
15419 pAllocationInfo->deviceMemory = hAllocation->GetMemory();
15420 pAllocationInfo->offset = hAllocation->GetOffset();
15421 pAllocationInfo->size = hAllocation->GetSize();
15423 pAllocationInfo->pUserData = hAllocation->GetUserData();
15428 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15430 localLastUseFrameIndex = localCurrFrameIndex;
15437 #if VMA_STATS_STRING_ENABLED
15438 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15439 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15442 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15443 if(localLastUseFrameIndex == localCurrFrameIndex)
15449 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15451 localLastUseFrameIndex = localCurrFrameIndex;
15457 pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
15458 pAllocationInfo->deviceMemory = hAllocation->GetMemory();
15459 pAllocationInfo->offset = hAllocation->GetOffset();
15460 pAllocationInfo->size = hAllocation->GetSize();
15461 pAllocationInfo->pMappedData = hAllocation->GetMappedData();
15462 pAllocationInfo->pUserData = hAllocation->GetUserData();
15466 bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
15469 if(hAllocation->CanBecomeLost())
15471 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15472 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15475 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15479 else if(localLastUseFrameIndex == localCurrFrameIndex)
15485 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15487 localLastUseFrameIndex = localCurrFrameIndex;
15494 #if VMA_STATS_STRING_ENABLED
15495 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15496 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15499 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15500 if(localLastUseFrameIndex == localCurrFrameIndex)
15506 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15508 localLastUseFrameIndex = localCurrFrameIndex;
15520 VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
15530 return VK_ERROR_INITIALIZATION_FAILED;
15533 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);
15535 *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);
15537 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15538 if(res != VK_SUCCESS)
15540 vma_delete(this, *pPool);
15547 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15548 (*pPool)->SetId(m_NextPoolId++);
15549 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15555 void VmaAllocator_T::DestroyPool(VmaPool pool)
15559 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15560 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15561 VMA_ASSERT(success && "Pool not found in Allocator.");
15564 vma_delete(this, pool);
15569 pool->m_BlockVector.GetPoolStats(pPoolStats);
15572 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15574 m_CurrentFrameIndex.store(frameIndex);
15576 #if VMA_MEMORY_BUDGET
15577 if(m_UseExtMemoryBudget)
15579 UpdateVulkanBudget();
15581 #endif // #if VMA_MEMORY_BUDGET
15584 void VmaAllocator_T::MakePoolAllocationsLost(
15586 size_t* pLostAllocationCount)
15588 hPool->m_BlockVector.MakePoolAllocationsLost(
15589 m_CurrentFrameIndex.load(),
15590 pLostAllocationCount);
15593 VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
15595 return hPool->m_BlockVector.CheckCorruption();
15598 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15600 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
15603 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15605 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15607 VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
15608 VMA_ASSERT(pBlockVector);
15609 VkResult localRes = pBlockVector->CheckCorruption();
15612 case VK_ERROR_FEATURE_NOT_PRESENT:
15615 finalRes = VK_SUCCESS;
15625 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15626 for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15628 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15630 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15633 case VK_ERROR_FEATURE_NOT_PRESENT:
15636 finalRes = VK_SUCCESS;
15648 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
15650 *pAllocation = m_AllocationObjectAllocator.Allocate();
15651 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST, false);
15652 (*pAllocation)->InitLost();
15655 VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15657 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15660 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0) // bitwise AND: test whether a size limit is in effect for this heap ('|' would always be nonzero)
15662 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
15663 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
15666 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
15667 if(blockBytesAfterAllocation > heapSize)
15669 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15671 if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation)) // must index heapIndex; '->' would decay to element 0
15679 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
15683 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15685 if(res == VK_SUCCESS)
15687 ++m_Budget.m_OperationsSinceBudgetFetch;
15690 if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
15692 (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15697 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
15703 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15706 if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
15708 (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
15712 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15714 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15715 m_Budget.m_BlockBytes[heapIndex] -= size;
15716 ++m_Budget.m_OperationsSinceBudgetFetch;
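/*
Sketch of the device-memory callbacks invoked by AllocateVulkanMemory and
FreeVulkanMemory above (illustrative; the logging bodies are hypothetical):

    void VKAPI_CALL MyAllocateCallback(VmaAllocator allocator, uint32_t memoryType,
        VkDeviceMemory memory, VkDeviceSize size)
    {
        printf("vkAllocateMemory: type %u, %llu bytes\n", memoryType, (unsigned long long)size);
    }
    void VKAPI_CALL MyFreeCallback(VmaAllocator allocator, uint32_t memoryType,
        VkDeviceMemory memory, VkDeviceSize size)
    {
        printf("vkFreeMemory: type %u, %llu bytes\n", memoryType, (unsigned long long)size);
    }

    VmaDeviceMemoryCallbacks callbacks = { MyAllocateCallback, MyFreeCallback };
    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pDeviceMemoryCallbacks = &callbacks;
*/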
15719 VkResult VmaAllocator_T::BindVulkanBuffer(
15720 VkDeviceMemory memory,
15721 VkDeviceSize memoryOffset,
15725 if(pNext != VMA_NULL)
15727 #if VMA_BIND_MEMORY2
15728 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15730 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
15731 bindBufferMemoryInfo.pNext = pNext;
15732 bindBufferMemoryInfo.buffer = buffer;
15733 bindBufferMemoryInfo.memory = memory;
15734 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15735 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15738 #endif // #if VMA_BIND_MEMORY2
15740 return VK_ERROR_EXTENSION_NOT_PRESENT;
15745 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
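/*
BindVulkanBuffer above falls back to plain vkBindBufferMemory when no pNext
chain is requested; a non-null pNext requires VK_KHR_bind_memory2. A hedged
sketch of the public entry point that ends up here:

    // Bind a buffer at a non-zero offset inside an existing allocation,
    // optionally passing a pNext chain through to vkBindBufferMemory2KHR.
    vmaBindBufferMemory2(allocator, allocation, localOffset, buffer, pNextChain);
*/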
15749 VkResult VmaAllocator_T::BindVulkanImage(
15750 VkDeviceMemory memory,
15751 VkDeviceSize memoryOffset,
15755 if(pNext != VMA_NULL)
15757 #if VMA_BIND_MEMORY2
15758 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15760 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
15761 bindBufferMemoryInfo.pNext = pNext;
15762 bindBufferMemoryInfo.image = image;
15763 bindBufferMemoryInfo.memory = memory;
15764 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15765 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15768 #endif // #if VMA_BIND_MEMORY2
15770 return VK_ERROR_EXTENSION_NOT_PRESENT;
15775 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
15779 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
15781 if(hAllocation->CanBecomeLost())
15783 return VK_ERROR_MEMORY_MAP_FAILED;
15786 switch(hAllocation->GetType())
15788 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15790 VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
15791 char *pBytes = VMA_NULL;
15792 VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
15793 if(res == VK_SUCCESS)
15795 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15796 hAllocation->BlockAllocMap();
15800 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15801 return hAllocation->DedicatedAllocMap(this, ppData);
15804 return VK_ERROR_MEMORY_MAP_FAILED;
15810 switch(hAllocation->GetType())
15812 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15814 VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
15815 hAllocation->BlockAllocUnmap();
15816 pBlock->Unmap(this, 1);
15819 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15820 hAllocation->DedicatedAllocUnmap(this);
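/*
Typical use of the Map/Unmap pair above through the public API (illustrative;
assumes the allocation lives in a HOST_VISIBLE memory type and that
srcData/srcSize are the caller's):

    void* pData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, allocation, &pData);
    if(res == VK_SUCCESS)
    {
        memcpy(pData, srcData, srcSize);
        vmaUnmapMemory(allocator, allocation);
    }

Mapping is reference-counted per VkDeviceMemory block, so concurrent
vmaMapMemory calls on allocations from the same block are safe.
*/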
15827 VkResult VmaAllocator_T::BindBufferMemory(
15829 VkDeviceSize allocationLocalOffset,
15833 VkResult res = VK_SUCCESS;
15834 switch(hAllocation->GetType())
15836 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15837 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
15839 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15841 VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
15842 VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15843 res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
15852 VkResult VmaAllocator_T::BindImageMemory(
15854 VkDeviceSize allocationLocalOffset,
15858 VkResult res = VK_SUCCESS;
15859 switch(hAllocation->GetType())
15861 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15862 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
15864 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15866 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15867 VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15868 res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
15877 void VmaAllocator_T::FlushOrInvalidateAllocation(
15879 VkDeviceSize offset, VkDeviceSize size,
15880 VMA_CACHE_OPERATION op)
15882 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15883 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15885 const VkDeviceSize allocationSize = hAllocation->GetSize();
15886 VMA_ASSERT(offset <= allocationSize);
15888 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15890 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15891 memRange.memory = hAllocation->GetMemory();
15893 switch(hAllocation->GetType())
15895 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15896 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15897 if(size == VK_WHOLE_SIZE)
15899 memRange.size = allocationSize - memRange.offset;
15903 VMA_ASSERT(offset + size <= allocationSize);
15904 memRange.size = VMA_MIN(
15905 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15906 allocationSize - memRange.offset);
15910 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15913 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15914 if(size == VK_WHOLE_SIZE)
15916 size = allocationSize - offset;
15920 VMA_ASSERT(offset + size <= allocationSize);
15922 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15925 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15926 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15927 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15928 memRange.offset += allocationOffset;
15929 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
15940 case VMA_CACHE_FLUSH:
15941 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15943 case VMA_CACHE_INVALIDATE:
15944 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
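/*
Usage sketch for the cache-maintenance path above (illustrative). For memory
types without HOST_COHERENT, writes made through a mapped pointer must be
flushed; the function above rounds the range to nonCoherentAtomSize and clamps
it to the allocation or block:

    memcpy(pMapped, srcData, srcSize);
    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
    // ...and vmaInvalidateAllocation before reading data written by the GPU.
*/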
15953 void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
15955 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15957 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15959 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15960 AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15961 VMA_ASSERT(pDedicatedAllocations);
15962 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15963 VMA_ASSERT(success);
15966 VkDeviceMemory hMemory = allocation->GetMemory();
15978 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15980 VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15983 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
15985 VkBufferCreateInfo dummyBufCreateInfo;
15986 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15988 uint32_t memoryTypeBits = 0;
15991 VkBuffer buf = VK_NULL_HANDLE;
15992 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15993 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15994 if(res == VK_SUCCESS)
15997 VkMemoryRequirements memReq;
15998 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15999 memoryTypeBits = memReq.memoryTypeBits;
16002 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
16005 return memoryTypeBits;
16008 #if VMA_MEMORY_BUDGET
16010 void VmaAllocator_T::UpdateVulkanBudget()
16012 VMA_ASSERT(m_UseExtMemoryBudget);
16014 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
16016 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
16017 memProps.pNext = &budgetProps;
16019 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
16022 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
16024 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16026 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
16027 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
16028 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
16030 m_Budget.m_OperationsSinceBudgetFetch = 0;
16034 #endif // #if VMA_MEMORY_BUDGET
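/*
Sketch of opting into the budget mechanism refreshed by UpdateVulkanBudget
above (illustrative): enable the extensions on the device, pass the flag at
allocator creation, and call vmaSetCurrentFrameIndex once per frame so the
cached values are refetched periodically:

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
    // requires VK_EXT_memory_budget + VK_KHR_get_physical_device_properties2

    // once per frame:
    vmaSetCurrentFrameIndex(allocator, frameIndex);
*/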
16036 void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
16038 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
16039 !hAllocation->CanBecomeLost() &&
16040 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
16042 void* pData = VMA_NULL;
16043 VkResult res = Map(hAllocation, &pData);
16044 if(res == VK_SUCCESS)
16046 memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
16047 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
16048 Unmap(hAllocation);
16052 VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
16057 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
16059 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
16060 if(memoryTypeBits == UINT32_MAX)
16062 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
16063 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
16065 return memoryTypeBits;
16068 #if VMA_STATS_STRING_ENABLED
16070 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
16072 bool dedicatedAllocationsStarted = false;
16073 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16075 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16076 AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
16077 VMA_ASSERT(pDedicatedAllocVector);
16078 if(pDedicatedAllocVector->empty() == false)
16080 if(dedicatedAllocationsStarted == false)
16082 dedicatedAllocationsStarted = true;
16083 json.WriteString("DedicatedAllocations");
16084 json.BeginObject();
16087 json.BeginString("Type ");
16088 json.ContinueString(memTypeIndex);
16093 for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
16095 json.BeginObject(true);
16097 hAlloc->PrintParameters(json);
16104 if(dedicatedAllocationsStarted)
16110 bool allocationsStarted = false;
16111 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16113 if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
16115 if(allocationsStarted == false)
16117 allocationsStarted = true;
16118 json.WriteString("DefaultPools");
16119 json.BeginObject();
16122 json.BeginString("Type ");
16123 json.ContinueString(memTypeIndex);
16126 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
16129 if(allocationsStarted)
16137 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
16138 const size_t poolCount = m_Pools.size();
16141 json.WriteString("Pools");
16142 json.BeginObject();
16143 for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
16145 json.BeginString();
16146 json.ContinueString(m_Pools[poolIndex]->GetId());
16149 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
16156 #endif // #if VMA_STATS_STRING_ENABLED
16165 VMA_ASSERT(pCreateInfo && pAllocator);
16166 VMA_DEBUG_LOG("vmaCreateAllocator");
16168 return (*pAllocator)->Init(pCreateInfo);
16174 if(allocator != VK_NULL_HANDLE)
16176 VMA_DEBUG_LOG("vmaDestroyAllocator");
16177 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
16178 vma_delete(&allocationCallbacks, allocator);
16184 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
16186 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
16187 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
16192 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
16194 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
16195 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
16200 uint32_t memoryTypeIndex,
16201 VkMemoryPropertyFlags* pFlags)
16203 VMA_ASSERT(allocator && pFlags);
16204 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
16205 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
16210 uint32_t frameIndex)
16212 VMA_ASSERT(allocator);
16213 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
16215 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16217 allocator->SetCurrentFrameIndex(frameIndex);
16224 VMA_ASSERT(allocator && pStats);
16225 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16226 allocator->CalculateStats(pStats);
16233 VMA_ASSERT(allocator && pBudget);
16234 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16235 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
16238 #if VMA_STATS_STRING_ENABLED
16242 char** ppStatsString,
16243 VkBool32 detailedMap)
16245 VMA_ASSERT(allocator && ppStatsString);
16246 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16248 VmaStringBuilder sb(allocator);
16250 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
16251 json.BeginObject();
16254 allocator->CalculateStats(&stats);
16256 json.WriteString("Total");
16257 VmaPrintStatInfo(json, stats.total);
16259 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
16261 json.BeginString("Heap ");
16262 json.ContinueString(heapIndex);
16264 json.BeginObject();
16266 json.WriteString("Size");
16267 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
16269 json.WriteString("Flags");
16270 json.BeginArray(true);
16271 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
16273 json.WriteString("DEVICE_LOCAL");
16279 json.WriteString("Stats");
16280 VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
16283 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
16285 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
16287 json.BeginString("Type ");
16288 json.ContinueString(typeIndex);
16291 json.BeginObject();
16293 json.WriteString("Flags");
16294 json.BeginArray(true);
16295 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
16296 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
16298 json.WriteString("DEVICE_LOCAL");
16300 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
16302 json.WriteString("HOST_VISIBLE");
16304 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
16306 json.WriteString("HOST_COHERENT");
16308 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
16310 json.WriteString("HOST_CACHED");
16312 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
16314 json.WriteString("LAZILY_ALLOCATED");
16320 json.WriteString("Stats");
16321 VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
16330 if(detailedMap == VK_TRUE)
16332 allocator->PrintDetailedMap(json);
16338 const size_t len = sb.GetLength();
16339 char* const pChars = vma_new_array(allocator, char, len + 1);
16342 memcpy(pChars, sb.GetData(), len);
16344 pChars[len] = '\0';
16345 *ppStatsString = pChars;
16350 char* pStatsString)
16352 if(pStatsString != VMA_NULL)
16354 VMA_ASSERT(allocator);
16355 size_t len = strlen(pStatsString);
16356 vma_delete_array(allocator, pStatsString, len + 1);
16360 #endif // #if VMA_STATS_STRING_ENABLED
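/*
Usage sketch for the JSON dump built above (illustrative):

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // detailed map included
    // write statsString to a file or log; it is a JSON document
    vmaFreeStatsString(allocator, statsString);
*/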
16367 uint32_t memoryTypeBits,
16369 uint32_t* pMemoryTypeIndex)
16371 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16372 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16373 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16380 uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
16381 uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
16384 switch(pAllocationCreateInfo->usage)
16389 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16391 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
16395 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
16398 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
16399 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16401 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
16405 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
16406 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
16412 *pMemoryTypeIndex = UINT32_MAX;
16413 uint32_t minCost = UINT32_MAX;
16414 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
16415 memTypeIndex < allocator->GetMemoryTypeCount();
16416 ++memTypeIndex, memTypeBit <<= 1)
16419 if((memTypeBit & memoryTypeBits) != 0)
16421 const VkMemoryPropertyFlags currFlags =
16422 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16424 if((requiredFlags & ~currFlags) == 0)
16427 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
16429 if(currCost < minCost)
16431 *pMemoryTypeIndex = memTypeIndex;
16436 minCost = currCost;
16441 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
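/*
Illustrative query, assuming a caller-owned `g_Allocator`: the loop above scores
each memory type allowed by `memoryTypeBits` by how many preferredFlags bits it
lacks and keeps the cheapest, so finding a type for a staging resource looks like:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;
    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(
        g_Allocator,
        UINT32_MAX, // no restriction from a specific resource
        &allocCreateInfo,
        &memTypeIndex);
*/
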
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

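/*
Sketch of the buffer-info variant (all names are illustrative): a temporary VkBuffer
is created and destroyed internally above just to learn memoryTypeBits, so the caller
only supplies the two create-infos, typically to pre-select a custom pool's memory type.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 1024;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
    uint32_t memTypeIndex = UINT32_MAX;
    vmaFindMemoryTypeIndexForBufferInfo(g_Allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/
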
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

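/*
Custom pool sketch, continuing the previous example (`memTypeIndex` as found above;
all names are illustrative, not part of this file):

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 16ull * 1024 * 1024; // 0 would mean default block size
    poolCreateInfo.minBlockCount = 1;
    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(g_Allocator, &poolCreateInfo, &pool);
    // Allocate with VmaAllocationCreateInfo::pool = pool, then vmaDestroyPool(g_Allocator, pool).
*/
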
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

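/*
Low-level usage sketch (`g_Allocator`, `device`, and `buf` are assumed to exist in
the caller's code). Most applications should prefer vmaCreateBuffer()/vmaCreateImage()
further below, which bundle creation, allocation, and binding.

    VkMemoryRequirements memReq = {};
    vkGetBufferMemoryRequirements(device, buf, &memReq);
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VmaAllocationInfo allocInfo = {};
    VkResult res = vmaAllocateMemory(g_Allocator, &memReq, &allocCreateInfo, &alloc, &allocInfo);
*/
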
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

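/*
Two-step buffer setup sketch (`g_Allocator` and a VkBuffer `buf` created elsewhere
are assumed): this entry point chooses dedicated vs. sub-allocated memory, but
binding remains the caller's job, e.g. via vmaBindBufferMemory().

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VmaAllocation alloc = VK_NULL_HANDLE;
    if(vmaAllocateMemoryForBuffer(g_Allocator, buf, &allocCreateInfo, &alloc, nullptr) == VK_SUCCESS)
    {
        vmaBindBufferMemory(g_Allocator, alloc, buf);
    }
*/
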
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}

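/*
Map/unmap round trip, sketched with assumed names (`alloc` must live in HOST_VISIBLE
memory; `srcData` and `srcSize` are the caller's): mapping is reference-counted
internally, so nested vmaMapMemory()/vmaUnmapMemory() pairs on the same allocation
are legal.

    void* mapped = nullptr;
    if(vmaMapMemory(g_Allocator, alloc, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, srcData, srcSize);
        vmaUnmapMemory(g_Allocator, alloc);
    }
*/
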
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

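/*
Cache-maintenance pattern for memory types without HOST_COHERENT, sketched with
assumed handles: flush after CPU writes, invalidate before CPU reads. Offset and
size are relative to the allocation; VK_WHOLE_SIZE covers its full range.

    vmaFlushAllocation(g_Allocator, alloc, 0, VK_WHOLE_SIZE);      // after writing
    vmaInvalidateAllocation(g_Allocator, alloc, 0, VK_WHOLE_SIZE); // before reading
*/
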
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(
    VmaAllocator allocator,
    uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    // Degenerate case: Nothing to defragment.
    if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

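/*
CPU-side defragmentation sketch (the arrays and `g_Allocator` are assumed): with no
command buffer in the info struct, only host-visible memory can be moved, and buffers
or images bound to moved allocations must be recreated and rebound by the caller.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount; // VmaAllocation allocs[allocCount]
    defragInfo.pAllocations = allocs;
    defragInfo.pAllocationsChanged = changed;          // optional VkBool32 array
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(g_Allocator, &defragInfo, nullptr, &defragCtx);
    vmaDefragmentationEnd(g_Allocator, defragCtx);
*/
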
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

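/*
One-call buffer creation sketch, the usual entry point (`g_Allocator` assumed):
create-infos in, bound buffer plus allocation out, released together afterwards
with vmaDestroyBuffer().

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(g_Allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
    // ... use the buffer ...
    vmaDestroyBuffer(g_Allocator, buf, alloc);
*/
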
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

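/*
Image counterpart, sketched with assumed names: tiling picks the suballocation type
above, which keeps OPTIMAL and LINEAR resources apart per bufferImageGranularity.

    VkImageCreateInfo imgCreateInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imgCreateInfo.imageType = VK_IMAGE_TYPE_2D;
    imgCreateInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imgCreateInfo.extent = { 1024, 1024, 1 };
    imgCreateInfo.mipLevels = 1;
    imgCreateInfo.arrayLayers = 1;
    imgCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imgCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imgCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    VkImage img = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    vmaCreateImage(g_Allocator, &imgCreateInfo, &allocCreateInfo, &img, &alloc, nullptr);
    // ... use the image ...
    vmaDestroyImage(g_Allocator, img, alloc);
*/
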
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION