#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && VK_KHR_get_physical_device_properties2
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

// Define these macros to decorate all public functions with additional code,
// before and after returned type, appropriately. This may be useful for
// exporting the functions when compiling VMA as a separate static or dynamic library.
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
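/*
Illustrative sketch (an assumption, not part of the original file): each of the
three feature macros above can be forced off before this header is included,
e.g. when the corresponding extensions are known to be unavailable:

    #define VMA_DEDICATED_ALLOCATION 0
    #define VMA_BIND_MEMORY2 0
    #define VMA_MEMORY_BUDGET 0
    #include "vk_mem_alloc.h"
*/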
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
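/*
Illustrative sketch (an assumption about typical usage, not from the original
file): when static linking of Vulkan entry points is disabled
(VMA_STATIC_VULKAN_FUNCTIONS == 0), these extension pointers are fetched
manually and passed in via VmaAllocatorCreateInfo::pVulkanFunctions:

    VmaVulkanFunctions fn = {};
    fn.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2KHR)
        vkGetDeviceProcAddr(device, "vkGetBufferMemoryRequirements2KHR");
    fn.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2KHR)
        vkGetDeviceProcAddr(device, "vkGetImageMemoryRequirements2KHR");
*/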
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as string in JSON format.
/// @param[out] ppStatsString Must be freed using vmaFreeStatsString() function.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
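/*
Illustrative usage sketch (assumed typical call pattern, not from the original
file): choosing a memory type for a CPU-side staging buffer, e.g. before
creating a custom pool with that memoryTypeIndex:

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 65536;
    bufInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex = 0;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufInfo, &allocInfo, &memTypeIndex);
*/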
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
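/*
Illustrative usage sketch (assumed typical call pattern, not from the original
file): creating a buffer together with its backing memory in one call, and
destroying both together:

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    VkResult res = vmaCreateBuffer(
        allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, nullptr);
    // ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/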
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H

// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

/*******************************************************************************
CONFIGURATION SECTION

Define some of these macros before each #include of this header or change them
here if you need other than the default behavior depending on your environment.
*/

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
// Define this macro to 1 to make the library use STL containers instead of its
// own implementation of them.
#if VMA_USE_STL_CONTAINERS
   #define VMA_USE_STL_VECTOR 1
   #define VMA_USE_STL_UNORDERED_MAP 1
   #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it is always 199711L, although shared_mutex works since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>   // for assert
#include <algorithm> // for min, max
#include <mutex>

#ifndef VMA_NULL
   // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
   #define VMA_NULL   nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
// aligned_alloc is missing on old Android; memalign is provided by <malloc.h>.
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
// aligned_alloc is not available; emulate it with posix_memalign.
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
   #ifdef _DEBUG
       #define VMA_ASSERT(expr)         assert(expr)
   #else
       #define VMA_ASSERT(expr)
   #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
   #ifdef _DEBUG
       #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
   #else
       #define VMA_HEAVY_ASSERT(expr)
   #endif
#endif
#ifndef VMA_ALIGN_OF
   #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
   #if defined(_WIN32)
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
   #else
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size)))
   #endif
#endif

#ifndef VMA_SYSTEM_FREE
   #if defined(_WIN32)
       #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
   #else
       #define VMA_SYSTEM_FREE(ptr)   free(ptr)
   #endif
#endif

#ifndef VMA_MIN
   #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
   #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
   #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
   #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
   #define VMA_DEBUG_LOG(format, ...)
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block.
    // Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
    // enable writing magic value to the margin before and after every allocation
    // and validating it, so that memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable single mutex protecting all
    // entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
   /// Maximum size of a memory heap in Vulkan to consider it "small".
   #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
   /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
   #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Internal-only allocation strategy bit, separate from the public
// VMA_ALLOCATION_CREATE_STRATEGY_* flags.
static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
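/*
Worked example (illustrative, not from the original file): the SWAR popcount
above sums bit counts in widening strides - 2-bit pairs, then nibbles, bytes,
and half-words. For v = 0b1011 it returns 3. A typical internal use is counting
the enabled memory types in a VkMemoryRequirements::memoryTypeBits mask.
*/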
// Aligns given value up to nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiple of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

// Returns true if given number is a power of two.
// T must be an unsigned integer, or a signed integer but always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
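/*
Worked examples (illustrative, not from the original file):

    VmaAlignUp<uint32_t>(11, 8)   == 16
    VmaAlignDown<uint32_t>(11, 8) == 8
    VmaRoundDiv<uint32_t>(7, 2)   == 4   // (7 + 1) / 2

All assume align/y is nonzero; VmaAlignUp additionally assumes val + align - 1
does not overflow T. VmaIsPow2 returns true for 0 by design, as noted above.
*/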
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
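/*
Worked examples (illustrative, not from the original file):

    VmaNextPow2(17u) == 32   // smallest power of 2 >= 17
    VmaPrevPow2(17u) == 16   // largest power of 2 <= 17
*/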
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory regions inside the same VkDeviceMemory, laid out at
the given offsets and size, end and begin on the same "page" of size pageSize
(typically VkPhysicalDeviceLimits::bufferImageGranularity).
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
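/*
Worked example (illustrative, not from the original file): with pageSize = 1024,
a resource A at offset 0, size 1000 ends on page 0 (byte 999), and a resource B
starting at offset 1000 also begins on page 0, so the function returns true -
the two resources would share a page. If B instead started at offset 1024, its
start page would be 1 and the result false.
*/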
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and the other one is optimal image. If the type is unknown,
behave conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
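/*
Illustrative consequence (an assumption spelled out, not from the original
file): a BUFFER placed next to an IMAGE_OPTIMAL conflicts, so on GPUs with
bufferImageGranularity > 1 the allocator must keep them on separate "pages",
while two BUFFERs, or a BUFFER next to an IMAGE_LINEAR, never conflict.
*/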
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
// Fills structure with parameters of an example buffer to be used for transfers
// during GPU memory defragmentation.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif
// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection, or the place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
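/*
Illustrative usage sketch (not from the original file): lower-bound search in a
sorted array with a standard less-than comparator.

    const int arr[] = { 1, 3, 3, 7 };
    struct IntLess { bool operator()(int a, int b) const { return a < b; } };
    const int* it = VmaBinaryFindFirstNotLess(arr, arr + 4, 3, IntLess());
    // it points to the first 3; searching for 4 would point to 7.
    // VmaBinaryFindSorted additionally returns end when the key is absent.
*/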
// Returns true if all pointers in the array are not-null and unique.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
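/*
Illustrative usage sketch (hypothetical type, not from the original file):
these helpers route every internal (de)allocation through the user-provided
VkAllocationCallbacks when present:

    struct MyStruct { int x; };
    MyStruct* p = vma_new(pAllocationCallbacks, MyStruct);          // placement-new on VmaMalloc'd storage
    MyStruct* a = vma_new_array(pAllocationCallbacks, MyStruct, 8);
    vma_delete(pAllocationCallbacks, p);                            // ~T() then VmaFree
    vma_delete_array(pAllocationCallbacks, a, 8);
*/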
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
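/*
Illustrative usage sketch (not from the original file): VmaStlAllocator adapts
VkAllocationCallbacks to the STL allocator interface, so the same code path
works for the custom containers below and for the real STL ones:

    VmaStlAllocator<int> alloc(pAllocationCallbacks); // callbacks may be null
    VmaVector< int, VmaStlAllocator<int> > v(alloc);
    v.push_back(42);
*/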
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
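/*
Illustrative usage sketch (not from the original file): maintaining a vector
sorted by a comparator, as done internally e.g. for free suballocations sorted
by size:

    struct IntLess { bool operator()(int a, int b) const { return a < b; } };
    VmaVector< int, VmaStlAllocator<int> > v(alloc);
    VmaVectorInsertSorted<IntLess>(v, 7);
    VmaVectorInsertSorted<IntLess>(v, 3);   // v is now { 3, 7 }
    VmaVectorRemoveSorted<IntLess>(v, 7);   // returns true, v is { 3 }
*/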
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    T* Alloc();
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
T* VmaPoolAllocator<T>::Alloc()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            return result;
        }
    }

    // No block has a free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
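/*
Design note with a small worked example (illustrative, not from the original
file): free slots in each block form an intrusive singly-linked list encoded
in the items themselves via NextFreeIndex, so Alloc() and Free() are O(1)
within a block. With firstBlockCapacity = 4, a fresh block has
FirstFreeIndex = 0 and the chain 0 -> 1 -> 2 -> 3 -> UINT32_MAX; allocating
twice moves FirstFreeIndex to 2, and freeing the first item pushes index 0
back to the head of the list. Block capacities grow by a factor of 3/2.
*/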
////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
////////////////////////////////////////////////////////////////////////////////
// class VmaMap

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
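/*
Design note (illustrative, not from the original file): this VmaMap fallback is
a vector kept sorted by key, so find() is a binary search (O(log n)) while
insert()/erase() shift elements (O(n)). For example, inserting keys 3, 1, 2
yields the underlying vector { {1,...}, {2,...}, {3,...} }. That trade-off
suits the small, read-mostly maps used internally.
*/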
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator, hence explicit Ctor/Dtor
    // instead of a constructor and destructor.
    void Ctor(uint32_t currentFrameIndex, bool userDataString)
    {
        m_Alignment = 1;
        m_Size = 0;
        m_MemoryTypeIndex = 0;
        m_pUserData = VMA_NULL;
        m_LastUseFrameIndex = currentFrameIndex;
        m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
        m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
        m_MapCount = 0;
        m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;

#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    void Dtor()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    If the allocation has not been used for more than frameInUseCount frames,
    marks it as lost and returns true; otherwise returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned
as allocated memory block, or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
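/*
Worked example (illustrative, not from the original file): with
sumItemSize == 4096 and itemsToMakeLostCount == 2, CalcCost() returns
4096 + 2 * 1048576 == 2101248, so a request that would sacrifice two existing
allocations is heavily penalized against one that only reuses free space.
*/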
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given pFreeSuballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
/*
Metadata for a block that allocates linearly: as a simple stack, as a ring
buffer when allocations are freed from the front, or as a double stack growing
from both ends of the block toward the middle.
*/
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
Buddy-system metadata:

- GetSize() is the original size of the allocated memory block.
- m_UsableSize is this size aligned down to a power of two; all allocations and
  calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them, reported as a separate,
  unused range, not available for allocations.

A node at level 0 has size m_UsableSize; each next level contains nodes with
size two times smaller than the current level, down to MIN_NODE_SIZE.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // Sum of sizes of free nodes.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
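/*
Worked example (illustrative, not from the original file): with
m_UsableSize == 256 MiB, LevelToNodeSize(0) == 256 MiB and
LevelToNodeSize(3) == 32 MiB. A 20 MiB request therefore lands in a 32 MiB
node at level 3, and the remaining 12 MiB of that node is internal
fragmentation typical of buddy allocators.
*/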
/*
Represents a single block of device memory (`VkDeviceMemory`) with all the
data about its regions (aka suballocations, #VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if it does not belong to a custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads
    simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, any change in m_pMetadata is protected by
    parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
};

class VmaDefragmentationAlgorithm;
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsEmpty() const { return m_Blocks.empty(); }
    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats);

    // To be used only while the m_Mutex is locked. Used during defragmentation.
    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;

    bool m_HasEmptyBlock;
    VMA_RW_MUTEX m_Mutex;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);
};
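
/*
Hedged sketch (not part of the library) of the allocation flow implemented by
this struct, under the assumptions visible in the declarations above:

    // AllocatePage():
    //   1. Try AllocateFromBlock() on existing blocks (m_Blocks is kept
    //      incrementally sorted, so better-fitting blocks come up first).
    //   2. If all fail and m_Blocks.size() < m_MaxBlockCount:
    //      CreateBlock() with preferred or reduced size, then allocate from it.
    //   3. Otherwise optionally make old allocations lost (frameInUseCount
    //      permitting) or fail with VK_ERROR_OUT_OF_DEVICE_MEMORY.
*/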
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
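
/*
Worked example (illustrative, not part of the library) of FreeSpaceDatabase
semantics - a fixed cache of at most MAX_COUNT = 4 large gaps seen while
sweeping blocks front-to-back:

    FreeSpaceDatabase db;
    db.Register(0, 256, 1024);   // block 0 has a 1024-byte gap at offset 256
    size_t blockIndex; VkDeviceSize dstOffset;
    if(db.Fetch(64, 512, blockIndex, dstOffset))
    {
        // blockIndex == 0, dstOffset == 256 (already 64-aligned).
        // The remaining 512 bytes stay registered for the next Fetch().
    }
*/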
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

    void Begin(bool overlappingMoveSupported);

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats);

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_Freq;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
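
/*
Hedged sketch (not part of the library): how these counters combine into the
user-facing budget. Assuming VK_EXT_memory_budget data has been fetched, usage
per heap is extrapolated from the last fetched value plus the block bytes
allocated since that fetch (see UpdateVulkanBudget and GetBudget):

    // usage  = m_VulkanUsage[heap] + (m_BlockBytes[heap] - m_BlockBytesAtBudgetFetch[heap]);
    // budget = m_VulkanBudget[heap];
    // Without the extension, usage falls back to m_BlockBytes[heap] and the
    // budget to a heuristic fraction of the heap size.
*/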
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so that limit is applied and free space is tracked.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    /*
    Calculates and returns bit mask of memory types that can support defragmentation
    on GPU as they support creation of required buffer for copy operations.
    */
    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
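
/*
Illustrative sketch (not part of the library): these helpers pair placement-new
with an explicit destructor call so every internal object goes through the
VkAllocationCallbacks rather than global new/delete. A typical pairing,
assuming the usual placement-new idiom:

    // T* obj = new(VmaAllocate<T>(hAllocator)) T(args...);
    // ...
    // vma_delete(hAllocator, obj); // runs obj->~T(), then VmaFree()
*/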
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
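
/*
Worked example (illustrative, not part of the library): the writer maintains a
stack of collection states so keys and values are separated correctly. This
sequence:

    // VmaJsonWriter json(allocationCallbacks, sb);
    // json.BeginObject();
    // json.WriteString("Size"); // key - must be a string inside an object
    // json.WriteNumber(256u);   // value - BeginValue() emits the ": "
    // json.EndObject();

appends approximately the following to the underlying VmaStringBuilder:

    // {
    //   "Size": 256
    // }
*/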
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting it to VMA_FRAME_INDEX_LOST is enough to mark the allocation as lost.
                return true;
            }
        }
    }
}
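
/*
Illustrative note (not part of the library): the loop above is a standard
lock-free compare-and-swap retry. Expressed with std::atomic for clarity (the
library uses its own VMA_ATOMIC_UINT32 wrapper):

    // uint32_t expected = lastUseFrame.load();
    // for(;;)
    // {
    //     if(expected == VMA_FRAME_INDEX_LOST) return false;            // already lost
    //     if(expected + frameInUseCount >= currentFrame) return false;  // still in use
    //     if(lastUseFrame.compare_exchange_weak(expected, VMA_FRAME_INDEX_LOST))
    //         return true; // we won the race - the allocation is now lost
    //     // otherwise another thread updated the value; retry with the
    //     // refreshed 'expected'.
    // }
*/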
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't
    // match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}

VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // Early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search staring from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
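
/*
Worked example (illustrative, not part of the library) of the padding
arithmetic above. Suppose the chosen free suballocation spans [100, 400)
(size 300) and the request was placed at offset 160 with allocSize 200:

    // paddingBegin = 160 - 100 = 60        -> new free suballocation [100, 160)
    // paddingEnd   = 300 - 60 - 200 = 40   -> new free suballocation [360, 400)
    // m_FreeCount: -1 (consumed) +1 (begin) +1 (end) = net +1
    // m_SumFreeSize decreases by exactly allocSize = 200
*/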
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
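
/*
Illustrative note (not part of the library): the merge logic above preserves
the invariant that no two adjacent suballocations are free. The possible
layouts around a freed item A:

    // [used][A][used] -> A simply becomes a registered free range
    // [used][A][free] -> next is unregistered and absorbed into A (mergeWithNext)
    // [free][A][....] -> prev absorbs A (and possibly next), then prev is
    //                    re-registered because its size changed
*/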
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty())
        {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else
        {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}

void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    // You may want to enable this validation at the beginning or at the end of
    // this function, depending on what do you want to check.
    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }

    //VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
        return false;

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
                typeConflictFound = true;
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}

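/*
Worked example (hypothetical numbers): with bufferImageGranularity = 4096, a
linear-tiling buffer ending at offset 100 and an OPTIMAL-tiling image starting
at offset 256 would share the same 4 KiB granularity page, which the Vulkan
bufferImageGranularity rule disallows. This function conservatively reports
whether such a mix could occur in this block at all.
*/
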
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear

VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}

bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null items at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // A null item at the end should have been just pop_back()-ed.
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // A null item at the end should have been just pop_back()-ed.
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
                ++nullItem2ndCount;

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
            ++nullItem1stCount;

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
                ++nullItem2ndCount;

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}

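/*
The invariants above, in words: exactly m_1stNullItemsBeginCount null items sit
at the front of the 1st vector; neither vector ends in a null item; offsets
grow monotonically across 2nd (ring part), then 1st, then 2nd (stack part,
iterated top-down); and m_SumFreeSize always equals GetSize() minus the bytes
occupied by live allocations.
*/
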
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    /*
    Gaps around freed allocations inside the vectors are not considered, because
    they are not suitable for reuse in a linear allocator. Only space available
    for new allocations counts.
    */
    if(IsEmpty())
        return size;

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        // Available space is after the end of 1st, as well as before its beginning
        // (which would make it a ring buffer).
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between the end of 2nd and the beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between the end of 1st and the top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}

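/*
Worked example (hypothetical numbers, block size 1000):
- SECOND_VECTOR_EMPTY with 1st occupying [100, 700): max free range is
  VMA_MAX(100, 1000 - 700) = 300.
- SECOND_VECTOR_RING_BUFFER with 2nd ending at 80 and 1st starting at 100:
  only the gap 100 - 80 = 20 counts.
- SECOND_VECTOR_DOUBLE_STACK with 1st ending at 700 and 2nd starting at 900:
  the middle gap 900 - 700 = 200 counts.
*/
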
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.unusedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                ++nextAlloc2ndIndex;

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                // 2. Process this allocation.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                // 3. Prepare for next iteration.
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                // We are at the end; process the tail free range.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
            ++nextAlloc1stIndex;

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }

            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                --nextAlloc2ndIndex;

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }

                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}

void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                ++nextAlloc2ndIndex;

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                // Free range before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedSize += unusedRangeSize;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedSize += unusedRangeSize;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
            ++nextAlloc1stIndex;

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];

            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedSize += unusedRangeSize;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }

            ++inoutStats.allocationCount;

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedSize += unusedRangeSize;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                --nextAlloc2ndIndex;

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];

                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedSize += unusedRangeSize;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }

                ++inoutStats.allocationCount;

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedSize += unusedRangeSize;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations and unused ranges.

    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                ++nextAlloc2ndIndex;

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                    ++unusedRangeCount;
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                    ++unusedRangeCount;
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
            ++nextAlloc1stIndex;

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
                ++unusedRangeCount;
            ++alloc1stCount;
            usedBytes += suballoc.size;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
                ++unusedRangeCount;
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                --nextAlloc2ndIndex;

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                    ++unusedRangeCount;
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                    ++unusedRangeCount;
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: write the ranges in offset order.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                ++nextAlloc2ndIndex;

            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
            ++nextAlloc1stIndex;

        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }

            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
                --nextAlloc2ndIndex;

            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }

                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);

                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}

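/*
Memory layout sketch for the two linear strategies (offset grows rightwards):

    Double stack:  |1st --->        ...free...        <--- 2nd|
    Ring buffer:   |2nd --->    ...free...    1st --->        |

Upper-address requests grow the 2nd vector down from the end of the block;
lower-address requests append after the 1st vector, wrapping into the 2nd
vector as a ring buffer once the end of the block is reached.
*/
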
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or at the end of the block if 2nd is empty.
    if(allocSize > size)
        return false;
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset)
            return false;
    }

    // Start from the offset equal to the end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN)
            return false;
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Increase alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else
                // Already on previous page.
                break;
        }
        if(bufferImageGranularityConflict)
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If any conflict exists, the allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
                        return false;
                }
                else
                    // Already on next page.
                    break;
            }
        }

        // All tests passed: success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        // pAllocationRequest->item unused.
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}

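/*
Offset math, worked through (hypothetical numbers): block size 1024, allocSize
100, allocAlignment 64, 2nd vector empty, VMA_DEBUG_MARGIN 0. Then
resultBaseOffset = 1024 - 100 = 924 and VmaAlignDown(924, 64) = 896, so the
allocation occupies [896, 996) and sumFreeSize = resultBaseOffset + allocSize -
endOf1st = 1024, the whole gap between the end of 1st (empty, so 0) and the end
of the block.
*/
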
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of the 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from the offset equal to the beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
            resultOffset += VMA_DEBUG_MARGIN;

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If any conflict exists, the allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                            return false;
                    }
                    else
                        // Already on next page.
                        break;
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            // pAllocationRequest->item, customData unused.
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to the end of the 2nd vector. Try to allocate there, treating the
    // beginning of the 1st vector as the end of free space.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from the offset equal to the beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
            resultOffset += VMA_DEBUG_MARGIN;

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // The next colliding allocation at the beginning of 1st vector was found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    // Nothing to do: it is a free null item.
                }
                else
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else
                        return false;
                }
                ++index1st;
            }

            // Check following suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, we must mark further allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            // Not checking the actual VmaIsBufferImageGranularityConflict(allocType, suballoc.type) here.
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else
                                return false;
                        }
                    }
                    else
                        // Already on next page.
                        break;
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation,
            // even after making all allocations from the 1st vector lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check following suballocations for BufferImageGranularity conflicts.
            // If any conflict exists, the allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                            return false;
                    }
                    else
                        // Already on next page.
                        break;
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            // pAllocationRequest->item, customData unused.
            return true;
        }
    }

    return false;
}

bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
        return true;

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from the 1st vector.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // At the end of 1st, wrap around to the beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
                suballocations = &AccessSuballocations2nd();
            // else (SECOND_VECTOR_EMPTY): suballocations keeps pointing at the 1st vector.
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                    ++m_1stNullItemsMiddleCount;
                else
                    ++m_2ndNullItemsCount;
                ++madeLostCount;
            }
            else
                return false;
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}

uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
        CleanupAfterFree();

    return lostAllocationCount;
}

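/*
Context note: "lost" allocations are an opt-in feature
(VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT). An allocation is eligible to be
sacrificed once its last-use frame index is more than frameInUseCount frames
behind currentFrameIndex; both loops above apply exactly that test before
turning a suballocation into a free null item.
*/
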
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}

void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of the 2-part ring buffer, so before the first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part of the ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}

void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: mark it as the next empty item at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in the 2-part ring buffer or top of the upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in the 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of the 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of the 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}

bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}

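/*
The compaction heuristic, worked through: with 100 suballocations of which 70
are null, nullItemCount * 2 = 140 >= (100 - 70) * 3 = 90, so the 1st vector is
compacted. With only 30 nulls, 60 >= 210 is false and the sparse vector is
kept. The suballocCount > 32 guard keeps small vectors from compacting
needlessly.
*/
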
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of the 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of the 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of the 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of the 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                    ++srcIndex;
                if(dstIndex != srcIndex)
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}

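/*
Note on the final branch above: when the 1st vector drains while a ring buffer
is active, the two vectors swap roles simply by flipping m_1stVectorIndex, so
the surviving 2nd vector becomes the new 1st without copying a single element.
*/
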
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy

VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
        ++m_LevelCount;

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}

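/*
Worked example (hypothetical numbers): Init(size = 40 MiB) sets m_UsableSize =
VmaPrevPow2(40 MiB) = 32 MiB; the trailing 8 MiB is reported as unusable space.
Level node sizes then halve from 32 MiB downwards for as long as they stay >=
MIN_NODE_SIZE and the level count stays below MAX_LEVELS.
*/
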
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate the tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate the free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at unused higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}

VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL)
            return LevelToNodeSize(level);
    }
    return 0;
}

void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;
    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0)
    {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}

void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED

bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the request might
    // concern an OPTIMAL image, round both alignment and size up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
        return false;

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}

bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    /*
    Lost allocations are not supported in the buddy allocator. This function should
    not be called with itemsToMakeLostCount other than 0.
    */
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    /*
    Lost allocations are not supported in the buddy allocator.
    */
    return 0;
}

void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down the tree, splitting free nodes until the target level is reached.
    while(currLevel < targetLevel)
    {
        // currNode is the first free node at currLevel; remove it from that free list.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert the current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to the free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
        // currNode, as the left child of the node just split, also fulfills the
        // alignment requirement.
    }

    // Remove from the free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to an allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}

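/*
Split walk, worked through (hypothetical numbers): to place a request that
AllocSizeToLevel() mapped to 64 KiB nodes into a free 256 KiB node, the loop
above splits 256 -> 128 -> 64 KiB. Each iteration descends into the freshly
pushed left child, so two splits reach the target level while the right-hand
buddies stay parked on their free lists.
*/
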
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }

    vma_delete(GetAllocationCallbacks(), node);
}

bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev and next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}

uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the deepest level whose node size still fits allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}

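/*
Worked example (hypothetical numbers): with m_UsableSize = 1024, allocSize 600
maps to level 0 (node 1024), 300 to level 1 (node 512), and 129..256 to level 2
(node 256): the loop descends only while the next smaller node size still fits
the request.
*/
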
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find the node and its level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free buddy nodes while possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}

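/*
Merge walk note: whenever the freed node's buddy is also free, both children
are deleted and the parent becomes the free node one level up; a fully freed
subtree therefore collapses back into a single node in O(m_LevelCount) steps.
*/
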
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}

void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}

void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}

#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED

////////////////////////////////////////////////////////////////////////////////
// class VmaDeviceMemoryBlock

VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}

void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means some allocations were not freed before destruction
    // of this memory block - i.e. unreleased VmaAllocation objects leaked.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}

bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}

VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
        return res;

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}

VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
        return VK_SUCCESS;

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        // Already mapped: just bump the reference count and return the existing pointer.
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
            *ppData = m_pMappedData;
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
                *ppData = m_pMappedData;
            m_MapCount = count;
        }
        return result;
    }
}

void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
        return;

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}

VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
        return res;

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
        return res;

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously
    // on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously
    // on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}
VmaPool_T::~VmaPool_T()
{
}
void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
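// VmaBlockVector - a sequence of VmaDeviceMemoryBlock objects all of one Vulkan
// memory type, used both for the allocator's default pools and for custom VmaPool
// objects. It owns block creation/destruction and implements the allocation strategy.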
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
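// Upper limit on the number of "make other allocations lost, then retry" rounds
// performed by AllocatePage() below before it gives up with VK_ERROR_TOO_MANY_OBJECTS.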
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free the allocations that already succeeded.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
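// Allocates a single page (one VmaAllocation) using a three-step strategy:
// 1. try to place it in an existing block (search order depends on the strategy flags),
// 2. create a new block, progressively halving its size when memory or budget is tight,
// 3. optionally make other allocations lost to reclaim space, retrying up to
//    VMA_ALLOCATION_TRY_COUNT times.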
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // With the linear algorithm, making other allocations lost is possible only when
    // the vector is limited to a single block used as a ring buffer.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }
        // 2. Try to create a new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    if(pBestRequestBlock->m_pMetadata->IsEmpty())
                    {
                        m_HasEmptyBlock = false;
                    }
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
                    (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations could become lost, but we could not make the whole
                // request succeed - try again with a fresh search.
            }
            else
            {
                // Could not find a place for this allocation in any block.
                break;
            }
        }

        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            // We are out of tries - a very unlikely situation with so many lost allocations.
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
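// Free() returns the allocation's region to its block's metadata. At most one
// empty block is kept alive for reuse; a second empty block (or any empty block
// while the heap budget is exceeded) is scheduled for deletion, which happens
// after the mutex is released.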
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block, or we are over budget - delete this one.
            // (We want to keep at most one empty block.)
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // Keep this block as the single empty one.
            else
            {
                m_HasEmptyBlock = true;
            }
        }
        // pBlock didn't become empty, but we have another empty block at the back - free it.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
                m_HasEmptyBlock = false;
            }
        }

        IncrementallySortBlocks();
    }

    // Destruction of a free block. Deferred until this point, outside of the mutex
    // lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
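// Fast path used by AllocatePage(): tries to carve the request out of a single
// given block without making any other allocations lost.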
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock. Because requested size fits in it, no other allocations become lost.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        // We no longer have an empty block.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = false;
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
        (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
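// CPU-side defragmentation: maps every block touched by the moves, memmove()s the
// data, and - for non-coherent memory types - invalidates the source range before
// each copy and flushes the destination range after it.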
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // This happens regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
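// GPU-side defragmentation: a temporary whole-block VkBuffer is created and bound
// to every block touched by the moves, then vkCmdCopyBuffer commands are recorded
// into the caller's command buffer. res is set to VK_NOT_READY while those copies
// are still pending; the temporary buffers are destroyed in DefragmentationEnd().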
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer covering the whole block where necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // The copies were only recorded, not executed yet.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    m_HasEmptyBlock = false;
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                m_HasEmptyBlock = true;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
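// Decides, per block vector, whether to defragment on the CPU (host-visible
// memory) or on the GPU (command buffer provided and the memory type opted in),
// runs the chosen algorithm, and applies the resulting moves.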
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            m_Mutex.LockWrite();
            pCtx->mutexLocked = true;
        }

        pCtx->Begin(overlappingMoveSupported);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
            VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
        pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, moves);
            }
        }
    }
}
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy the temporary buffers created for GPU defragmentation.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
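// VmaDefragmentationAlgorithm_Generic - the conservative algorithm. Blocks are
// sorted from most "destination" to most "source"; allocations are then moved one
// by one from the back blocks toward free space in the front blocks, for a fixed
// number of rounds.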
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // This is a choice based on research: minimize time spent searching for a new place.
    const uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move;
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);
                pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);

                allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    // Moving to an earlier block always makes sense; within the same block,
    // only moves toward a lower offset do.
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
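// VmaDefragmentationAlgorithm_Fast compacts in a single pass: blocks are sorted
// by free space, then every suballocation is slid toward the lowest possible
// offset in the current destination block. Gaps that must be skipped over are
// remembered in a FreeSpaceDatabase so later, smaller allocations can fill them.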
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination to most source.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, freeSpaceOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // only by less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;
                        VmaDefragmentationMove move = {
                            srcOrigBlockIndex, dstOrigBlockIndex,
                            srcAllocOffset, dstAllocOffset,
                            srcAllocSize };
                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    VmaDefragmentationMove move = {
                        srcOrigBlockIndex, dstOrigBlockIndex,
                        srcAllocOffset, dstAllocOffset,
                        srcAllocSize };
                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
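// The fast algorithm edits VmaBlockMetadata_Generic directly. PreprocessMetadata()
// strips all FREE suballocations (leaving only real allocations, sorted by offset);
// PostprocessMetadata() reinserts the free ranges between them and rebuilds
// m_FreeSuballocationsBySize, so the metadata is consistent again afterwards.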
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - the entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Linear search for the insertion point that keeps the list sorted by offset.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}
void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    // HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
    // The fast algorithm is supported only when all of its preconditions hold:
    // no debug margin, all allocations in this block vector movable, and no
    // possibility of a buffer/image granularity conflict.
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible())
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // Dedicated allocations cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocations cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
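// Recording (VMA_RECORDING_ENABLED): every public VMA call is appended to a CSV
// file as "<threadId>,<time>,<frameIndex>,<functionName>,<args...>", guarded by
// m_FileMutex. This Windows-only feature relies on QueryPerformanceCounter and
// fopen_s.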
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,7");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}
void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    const void* pUserData)
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        userDataStr.GetString());
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.pool,
        userDataStr.GetString());
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.pool,
        userDataStr.GetString());
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
    fprintf(m_File, ",");
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");

    if(pUserData != VMA_NULL)
            m_Str = (const char*)pUserData;
            sprintf_s(m_PtrStr, "%p", pUserData);
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled)
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
void VmaRecorder::GetBasicParams(CallParams& outParams)
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;

void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
    if(count) // Guard against count == 0 before reading pItems[0].
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
            fprintf(m_File, " %p", pItems[i]);
void VmaRecorder::Flush()

#endif // #if VMA_RECORDING_ENABLED

VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)

    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc();

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    ,m_pRecorder(VMA_NULL)

    if(VMA_DEBUG_DETECT_CORRUPTION)
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);

#if !(VMA_DEDICATED_ALLOCATION)
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
#if !(VMA_BIND_MEMORY2)
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
#if !(VMA_MEMORY_BUDGET)
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));
    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
        if(limit != VK_WHOLE_SIZE)
            m_HeapSizeLimitMask |= 1u << heapIndex;
            if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                m_MemProps.memoryHeaps[heapIndex].size = limit;

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            preferredBlockSize,
            GetBufferImageGranularity(),
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    VkResult res = VK_SUCCESS;

#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        if(res != VK_SUCCESS)
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
        UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
VmaAllocator_T::~VmaAllocator_T()
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
#endif // #if VMA_DEDICATED_ALLOCATION
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#if VMA_BIND_MEMORY2
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);

#undef VMA_COPY_IF_NOT_NULL
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
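// A minimal usage sketch (illustrative, assuming VMA_STATIC_VULKAN_FUNCTIONS is
// defined to 0 and the application fetched its own function pointers, e.g. via
// a loader). Every member checked by the asserts above must be filled in; the
// my* pointers below are hypothetical names for whatever the application loaded.
//
//   VmaVulkanFunctions vulkanFunctions = {};
//   vulkanFunctions.vkGetPhysicalDeviceProperties = myGetPhysicalDeviceProperties;
//   vulkanFunctions.vkAllocateMemory = myAllocateMemory;
//   // ... all remaining members ...
//
//   VmaAllocatorCreateInfo allocatorInfo = {};
//   allocatorInfo.physicalDevice = physicalDevice;
//   allocatorInfo.device = device;
//   allocatorInfo.pVulkanFunctions = &vulkanFunctions;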
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
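// Worked example (assuming the default VMA_SMALL_HEAP_MAX_SIZE of 1 GiB and a
// preferred large-heap block size of 256 MiB):
// - a 512 MiB heap counts as "small", so the block size is 512 MiB / 8 = 64 MiB;
// - an 8 GiB heap counts as "large", so the block size is the preferred 256 MiB.
// Both values are already multiples of 32 bytes, so VmaAlignUp leaves them as-is.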
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG(" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            return AllocateDedicatedMemory(

        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
        if(res == VK_SUCCESS)
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;

        res = AllocateDedicatedMemory(
            finalCreateInfo.pUserData,
        if(res == VK_SUCCESS)
            VMA_DEBUG_LOG(" Allocated as DedicatedMemory");
            VMA_DEBUG_LOG(" vkAllocateMemory FAILED");
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool isUserDataString,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VMA_ASSERT(allocationCount > 0 && pAllocations);

        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation)
        if(dedicatedBuffer != VK_NULL_HANDLE)
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        else if(dedicatedImage != VK_NULL_HANDLE)
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
#endif // #if VMA_DEDICATED_ALLOCATION

    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        res = AllocateDedicatedMemoryPage(
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)

    if(res == VK_SUCCESS)
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);

        VMA_DEBUG_LOG(" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);

    while(allocIndex--)
        VkDeviceMemory hMemory = currAlloc->GetMemory();
        FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
        m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
        currAlloc->SetUserData(this, VMA_NULL);
        m_AllocationObjectAllocator.Free(currAlloc);

    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool isUserDataString,
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
        VMA_DEBUG_LOG(" vkAllocateMemory FAILED");

    void* pMappedData = VMA_NULL;
        res = (*m_VulkanFunctions.vkMapMemory)(
            VMA_DEBUG_LOG(" vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);

    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
#endif // #if VMA_DEDICATED_ALLOCATION
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;

void VmaAllocator_T::GetImageMemoryRequirements(
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
#endif // #if VMA_DEDICATED_ALLOCATION
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
        return VK_ERROR_VALIDATION_FAILED_EXT;
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    if(requiresDedicatedAllocation)
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        if(createInfo.pool != VK_NULL_HANDLE)
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    if((createInfo.pool != VK_NULL_HANDLE) &&
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;

    if(createInfo.pool != VK_NULL_HANDLE)
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),

        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        if(res == VK_SUCCESS)
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
            if(res == VK_SUCCESS)
                memoryTypeBits &= ~(1u << memTypeIndex);
                if(res == VK_SUCCESS)
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                    if(res == VK_SUCCESS)
                        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
        if(allocation != VK_NULL_HANDLE)
            if(TouchAllocation(allocation))
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);

                switch(allocation->GetType())
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    VmaBlockVector* pBlockVector = VMA_NULL;
                    VmaPool hPool = allocation->GetBlock()->GetParentPool();
                    if(hPool != VK_NULL_HANDLE)
                        pBlockVector = &hPool->m_BlockVector;
                    else
                        const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                        pBlockVector = m_pBlockVectors[memTypeIndex];
                    pBlockVector->Free(allocation);
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);

            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            allocation->Dtor();
            m_AllocationObjectAllocator.Free(allocation);
VkResult VmaAllocator_T::ResizeAllocation(
    VkDeviceSize newSize)
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
        return VK_ERROR_VALIDATION_FAILED_EXT;
    if(newSize == alloc->GetSize())
    return VK_ERROR_OUT_OF_POOL_MEMORY;
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);

        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);

    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                    outBudget->usage = 0;

                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            UpdateVulkanBudget();
            GetBudget(outBudget, firstHeap, heapCount);

        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
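// A minimal usage sketch (illustrative): querying per-heap budgets through the
// public API and checking headroom on heap 0 before streaming in a new resource.
// newResourceSize is a hypothetical value supplied by the application.
//
//   VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
//   vmaGetBudget(allocator, budgets);
//   if(budgets[0].usage + newResourceSize > budgets[0].budget)
//   {
//       // Over budget: free or demote something first, or defer the upload.
//   }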
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddAllocations(

    VkResult res = (*pContext)->Defragment(
    if(res != VK_NOT_READY)
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;

VkResult VmaAllocator_T::DefragmentationEnd(
    vma_delete(this, context);
    if(hAllocation->CanBecomeLost())
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pUserData = hAllocation->GetUserData();
            else if(localLastUseFrameIndex == localCurrFrameIndex)
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                    localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                    localLastUseFrameIndex = localCurrFrameIndex;

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
    if(hAllocation->CanBecomeLost())
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            else if(localLastUseFrameIndex == localCurrFrameIndex)
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                    localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                    localLastUseFrameIndex = localCurrFrameIndex;
    VMA_DEBUG_LOG(" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);
        return VK_ERROR_INITIALIZATION_FAILED;

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
        vma_delete(this, *pPool);

        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);

void VmaAllocator_T::DestroyPool(VmaPool pool)
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");

    vma_delete(this, pool);

    pool->m_BlockVector.GetPoolStats(pPoolStats);
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
        UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET

void VmaAllocator_T::MakePoolAllocationsLost(
    size_t* pLostAllocationCount)
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
    return hPool->m_BlockVector.CheckCorruption();
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            case VK_ERROR_FEATURE_NOT_PRESENT:
                finalRes = VK_SUCCESS;

        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    finalRes = VK_SUCCESS;
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
    *pAllocation = m_AllocationObjectAllocator.Allocate();
    (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();

VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;

    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);

    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    if(pNext != VMA_NULL)
#if VMA_BIND_MEMORY2
        if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
#endif // #if VMA_BIND_MEMORY2
        return VK_ERROR_EXTENSION_NOT_PRESENT;
    return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    if(pNext != VMA_NULL)
#if VMA_BIND_MEMORY2
        if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
#endif // #if VMA_BIND_MEMORY2
        return VK_ERROR_EXTENSION_NOT_PRESENT;
    return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
    if(hAllocation->CanBecomeLost())
        return VK_ERROR_MEMORY_MAP_FAILED;

    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        char* pBytes = VMA_NULL;
        VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
        if(res == VK_SUCCESS)
            *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
            hAllocation->BlockAllocMap();
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
        return VK_ERROR_MEMORY_MAP_FAILED;

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        hAllocation->BlockAllocUnmap();
        pBlock->Unmap(this, 1);
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
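// A minimal usage sketch (illustrative) of these paths through the public API:
// map, write, then unmap. srcData and srcSize are hypothetical application values.
//
//   void* mapped = VMA_NULL;
//   if(vmaMapMemory(allocator, allocation, &mapped) == VK_SUCCESS)
//   {
//       memcpy(mapped, srcData, (size_t)srcSize);
//       vmaUnmapMemory(allocator, allocation);
//   }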
VkResult VmaAllocator_T::BindBufferMemory(
    VkDeviceSize allocationLocalOffset,
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);

VkResult VmaAllocator_T::BindImageMemory(
    VkDeviceSize allocationLocalOffset,
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
                memRange.size = allocationSize - memRange.offset;
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
                size = allocationSize - offset;
                VMA_ASSERT(offset + size <= allocationSize);
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);

        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
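// Worked example of the rounding above (assuming nonCoherentAtomSize = 64):
// flushing offset = 70, size = 100 in a 256-byte dedicated allocation gives
//   memRange.offset = VmaAlignDown(70, 64) = 64
//   memRange.size   = min(VmaAlignUp(100 + (70 - 64), 64), 256 - 64)
//                   = min(128, 192) = 128
// so the flushed range is [64, 192), which covers the requested [70, 170).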
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);

    VkDeviceMemory hMemory = allocation->GetMemory();
    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG(" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);

uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());

    return memoryTypeBits;
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    memProps.pNext = &budgetProps;

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
        m_Budget.m_OperationsSinceBudgetFetch = 0;

#endif // #if VMA_MEMORY_BUDGET
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");

uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    return memoryTypeBits;
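// Note on the lazy initialization above: it is benign under concurrency. Two
// threads may both observe UINT32_MAX and both recompute the bits, but the
// computation is deterministic, so they store the same value and no lock is
// required around the load/store pair.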
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
            if(dedicatedAllocationsStarted == false)
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
                json.BeginObject(true);
                hAlloc->PrintParameters(json);

    if(dedicatedAllocationsStarted)

    bool allocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            if(allocationsStarted == false)
                allocationsStarted = true;
                json.WriteString("DefaultPools");
                json.BeginObject();

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);

            m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);

    if(allocationsStarted)

        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);

#endif // #if VMA_STATS_STRING_ENABLED
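// Illustrative only: the JSON this writes has roughly the following skeleton
// (the inner objects come from PrintParameters / PrintDetailedMap; keys and
// counts here are hypothetical):
//
//   "DedicatedAllocations": { "Type 2": [ { ... }, { ... } ] },
//   "DefaultPools": { "Type 0": { ... }, "Type 2": { ... } },
//   "Pools": { "1": { ... } }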
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    return (*pAllocator)->Init(pCreateInfo);

    if(allocator != VK_NULL_HANDLE)
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);

    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;

    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;

    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;

    uint32_t frameIndex)
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);

    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);

    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();

            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
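/*
Usage sketch (not part of the library): dumping the allocator state as a JSON
string for offline inspection. Passing detailedMap == VK_TRUE additionally
serializes every block and allocation via PrintDetailedMap().

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE);
    // ... write statsString to a file or log ...
    vmaFreeStatsString(allocator, statsString);
*/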
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this memory type.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
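/*
Note on the search above: a memory type is eligible when it is allowed by
memoryTypeBits and contains every bit of requiredFlags. Among eligible types,
cost is the number of preferredFlags bits the type is missing; the lowest-cost
type wins and a cost of 0 returns immediately. For example, with
preferredFlags = DEVICE_LOCAL | HOST_COHERENT, a type that is only
DEVICE_LOCAL has cost 1 and loses to a type that has both bits (cost 0).
*/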
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}
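/*
Usage sketch (not part of the library): creating a custom pool. The
memoryTypeIndex is assumed to have been found earlier, e.g. with
vmaFindMemoryTypeIndexForBufferInfo().

    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memoryTypeIndex;
    poolInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB per block
    poolInfo.maxBlockCount = 2;

    VmaPool pool;
    VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool);
*/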
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
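/*
Usage sketch (not part of the library): allocating memory for a manually
created VkBuffer and binding it. vmaCreateBuffer() performs all of these steps
in one call and is usually preferable.

    VkBuffer buffer; // assumed created with vkCreateBuffer()
    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation;
    VkResult res = vmaAllocateMemoryForBuffer(allocator, buffer, &allocCreateInfo, &allocation, VMA_NULL);
    if(res == VK_SUCCESS)
    {
        res = vmaBindBufferMemory(allocator, allocation, buffer);
    }
*/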
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
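/*
Usage sketch (not part of the library): writing to a HOST_VISIBLE allocation.
Map/Unmap calls are reference-counted internally, so nested pairs are valid.
srcData and srcSize are assumed.

    void* mappedData = VMA_NULL;
    if(vmaMapMemory(allocator, allocation, &mappedData) == VK_SUCCESS)
    {
        memcpy(mappedData, srcData, srcSize);
        vmaUnmapMemory(allocator, allocation);
    }
*/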
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
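/*
Note: on memory types without HOST_COHERENT, CPU writes must be followed by
vmaFlushAllocation() and CPU reads preceded by vmaInvalidateAllocation(),
e.g. (sketch, not part of the library):

    vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);

On HOST_COHERENT memory types FlushOrInvalidateAllocation() skips the
vkFlushMappedMemoryRanges/vkInvalidateMappedMemoryRanges call, so calling
these functions unconditionally is safe.
*/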
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the VmaDefragmentationInfo2-based API.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    // Degenerate case: Nothing to defragment.
    if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
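/*
Usage sketch (not part of the library): CPU-side defragmentation of a set of
movable allocations. pAllocations/allocationCount are assumed; buffers or
images bound to the moved allocations must be recreated and rebound afterwards.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocationCount;
    defragInfo.pAllocations = pAllocations;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    vmaDefragmentationEnd(allocator, defragCtx);
*/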
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
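/*
Usage sketch (not part of the library): the common one-call path that creates
a buffer together with its memory.

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocCreateInfo, &buffer, &allocation, VMA_NULL);
    // ...
    vmaDestroyBuffer(allocator, buffer, allocation);
*/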
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
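/*
Usage sketch (not part of the library): creating a 2D image together with
device-local memory.

    VkImageCreateInfo imageInfo = { VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
    imageInfo.imageType = VK_IMAGE_TYPE_2D;
    imageInfo.format = VK_FORMAT_R8G8B8A8_UNORM;
    imageInfo.extent = { 1024, 1024, 1 };
    imageInfo.mipLevels = 1;
    imageInfo.arrayLayers = 1;
    imageInfo.samples = VK_SAMPLE_COUNT_1_BIT;
    imageInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkImage image;
    VmaAllocation allocation;
    vmaCreateImage(allocator, &imageInfo, &allocCreateInfo, &image, &allocation, VMA_NULL);
    // ...
    vmaDestroyImage(allocator, image, allocation);
*/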
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION