23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H
1764 #ifndef VMA_RECORDING_ENABLED
1765 #define VMA_RECORDING_ENABLED 0
1769 #define NOMINMAX // For windows.h
1773 #include <vulkan/vulkan.h>
1776 #if VMA_RECORDING_ENABLED
1777 #include <windows.h>
1783 #if !defined(VMA_VULKAN_VERSION)
1784 #if defined(VK_VERSION_1_1)
1785 #define VMA_VULKAN_VERSION 1001000
1787 #define VMA_VULKAN_VERSION 1000000
1791 #if !defined(VMA_DEDICATED_ALLOCATION)
1792 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
1793 #define VMA_DEDICATED_ALLOCATION 1
1795 #define VMA_DEDICATED_ALLOCATION 0
1799 #if !defined(VMA_BIND_MEMORY2)
1800 #if VK_KHR_bind_memory2
1801 #define VMA_BIND_MEMORY2 1
1803 #define VMA_BIND_MEMORY2 0
1807 #if !defined(VMA_MEMORY_BUDGET)
1808 #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
1809 #define VMA_MEMORY_BUDGET 1
1811 #define VMA_MEMORY_BUDGET 0
1820 #ifndef VMA_CALL_PRE
1821 #define VMA_CALL_PRE
1823 #ifndef VMA_CALL_POST
1824 #define VMA_CALL_POST
1841 uint32_t memoryType,
1842 VkDeviceMemory memory,
1847 uint32_t memoryType,
1848 VkDeviceMemory memory,
1951 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
1952 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1953 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1955 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
1956 PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
1957 PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
1959 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
1960 PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
2102 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
2110 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
2120 uint32_t memoryTypeIndex,
2121 VkMemoryPropertyFlags* pFlags);
2133 uint32_t frameIndex);
2229 #ifndef VMA_STATS_STRING_ENABLED
2230 #define VMA_STATS_STRING_ENABLED 1
2233 #if VMA_STATS_STRING_ENABLED
2240 char** ppStatsString,
2241 VkBool32 detailedMap);
2245 char* pStatsString);
2247 #endif // #if VMA_STATS_STRING_ENABLED
2499 uint32_t memoryTypeBits,
2501 uint32_t* pMemoryTypeIndex);
2517 const VkBufferCreateInfo* pBufferCreateInfo,
2519 uint32_t* pMemoryTypeIndex);
2535 const VkImageCreateInfo* pImageCreateInfo,
2537 uint32_t* pMemoryTypeIndex);
2709 size_t* pLostAllocationCount);
2736 const char** ppName);
2829 const VkMemoryRequirements* pVkMemoryRequirements,
2855 const VkMemoryRequirements* pVkMemoryRequirements,
2857 size_t allocationCount,
2902 size_t allocationCount,
2914 VkDeviceSize newSize);
3294 size_t allocationCount,
3295 VkBool32* pAllocationsChanged,
3329 VkDeviceSize allocationLocalOffset,
3363 VkDeviceSize allocationLocalOffset,
3395 const VkBufferCreateInfo* pBufferCreateInfo,
3420 const VkImageCreateInfo* pImageCreateInfo,
3446 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
3449 #if defined(__cplusplus) && defined(__INTELLISENSE__)
3450 #define VMA_IMPLEMENTATION
3453 #ifdef VMA_IMPLEMENTATION
3454 #undef VMA_IMPLEMENTATION
3476 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
3477 #define VMA_STATIC_VULKAN_FUNCTIONS 1
3489 #if VMA_USE_STL_CONTAINERS
3490 #define VMA_USE_STL_VECTOR 1
3491 #define VMA_USE_STL_UNORDERED_MAP 1
3492 #define VMA_USE_STL_LIST 1
3495 #ifndef VMA_USE_STL_SHARED_MUTEX
3497 #if __cplusplus >= 201703L
3498 #define VMA_USE_STL_SHARED_MUTEX 1
3502 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
3503 #define VMA_USE_STL_SHARED_MUTEX 1
3505 #define VMA_USE_STL_SHARED_MUTEX 0
3513 #if VMA_USE_STL_VECTOR
3517 #if VMA_USE_STL_UNORDERED_MAP
3518 #include <unordered_map>
3521 #if VMA_USE_STL_LIST
3530 #include <algorithm>
3535 #define VMA_NULL nullptr
3538 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
3540 void *aligned_alloc(
size_t alignment,
size_t size)
3543 if(alignment <
sizeof(
void*))
3545 alignment =
sizeof(
void*);
3548 return memalign(alignment, size);
3550 #elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
3552 void *aligned_alloc(
size_t alignment,
size_t size)
3555 if(alignment <
sizeof(
void*))
3557 alignment =
sizeof(
void*);
3561 if(posix_memalign(&pointer, alignment, size) == 0)
3575 #define VMA_ASSERT(expr) assert(expr)
3577 #define VMA_ASSERT(expr)
3583 #ifndef VMA_HEAVY_ASSERT
3585 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
3587 #define VMA_HEAVY_ASSERT(expr)
3591 #ifndef VMA_ALIGN_OF
3592 #define VMA_ALIGN_OF(type) (__alignof(type))
3595 #ifndef VMA_SYSTEM_ALIGNED_MALLOC
3597 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
3599 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
3603 #ifndef VMA_SYSTEM_FREE
3605 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
3607 #define VMA_SYSTEM_FREE(ptr) free(ptr)
3612 #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
3616 #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
3620 #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
3624 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
3627 #ifndef VMA_DEBUG_LOG
3628 #define VMA_DEBUG_LOG(format, ...)
3638 #if VMA_STATS_STRING_ENABLED
// Formats `num` as decimal text into `outStr` (buffer capacity `strLen`).
// snprintf guarantees NUL-termination whenever strLen > 0; longer output
// is truncated.
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    const unsigned int value = static_cast<unsigned int>(num);
    snprintf(outStr, strLen, "%u", value);
}
// Formats `num` as decimal text into `outStr` (buffer capacity `strLen`).
// The cast to unsigned long long makes the value match the "%llu" specifier
// on every platform regardless of how uint64_t is typedef'd.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats pointer `ptr` into `outStr` (buffer capacity `strLen`) using the
// platform's "%p" representation. Always NUL-terminated when strLen > 0.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3657 void Lock() { m_Mutex.lock(); }
3658 void Unlock() { m_Mutex.unlock(); }
3662 #define VMA_MUTEX VmaMutex
3666 #ifndef VMA_RW_MUTEX
3667 #if VMA_USE_STL_SHARED_MUTEX
3669 #include <shared_mutex>
3673 void LockRead() { m_Mutex.lock_shared(); }
3674 void UnlockRead() { m_Mutex.unlock_shared(); }
3675 void LockWrite() { m_Mutex.lock(); }
3676 void UnlockWrite() { m_Mutex.unlock(); }
3678 std::shared_mutex m_Mutex;
3680 #define VMA_RW_MUTEX VmaRWMutex
3681 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
3687 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3688 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3689 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3690 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3691 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3695 #define VMA_RW_MUTEX VmaRWMutex
3701 void LockRead() { m_Mutex.Lock(); }
3702 void UnlockRead() { m_Mutex.Unlock(); }
3703 void LockWrite() { m_Mutex.Lock(); }
3704 void UnlockWrite() { m_Mutex.Unlock(); }
3708 #define VMA_RW_MUTEX VmaRWMutex
3709 #endif // #if VMA_USE_STL_SHARED_MUTEX
3710 #endif // #ifndef VMA_RW_MUTEX
3715 #ifndef VMA_ATOMIC_UINT32
3717 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
3720 #ifndef VMA_ATOMIC_UINT64
3722 #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
3725 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
3730 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
3733 #ifndef VMA_DEBUG_ALIGNMENT
3738 #define VMA_DEBUG_ALIGNMENT (1)
3741 #ifndef VMA_DEBUG_MARGIN
3746 #define VMA_DEBUG_MARGIN (0)
3749 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
3754 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
3757 #ifndef VMA_DEBUG_DETECT_CORRUPTION
3763 #define VMA_DEBUG_DETECT_CORRUPTION (0)
3766 #ifndef VMA_DEBUG_GLOBAL_MUTEX
3771 #define VMA_DEBUG_GLOBAL_MUTEX (0)
3774 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
3779 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
3782 #ifndef VMA_SMALL_HEAP_MAX_SIZE
3783 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
3787 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
3788 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
3792 #ifndef VMA_CLASS_NO_COPY
3793 #define VMA_CLASS_NO_COPY(className) \
3795 className(const className&) = delete; \
3796 className& operator=(const className&) = delete;
3799 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3802 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3804 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3805 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3811 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3813 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3814 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
/*
Returns the number of bits set to 1 in `v` (population count).
Uses the classic SWAR parallel bit-summing technique, so it is portable and
needs no compiler intrinsics.
*/
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = v - ((v >> 1) & 0x55555555u);                  // pairwise 2-bit sums
    count = (count & 0x33333333u) + ((count >> 2) & 0x33333333u);   // 4-bit sums
    count = (count + (count >> 4)) & 0x0F0F0F0Fu;                   // 8-bit sums
    count = (count + (count >> 8)) & 0x00FF00FFu;                   // 16-bit sums
    count = (count + (count >> 16)) & 0x0000FFFFu;                  // final 32-bit sum
    return count;
}
// Rounds `val` up to the nearest multiple of `align`.
// Works for any positive `align` (not restricted to powers of two), which is
// why it uses division rather than bit masking.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - static_cast<T>(1);
    return (bumped / align) * align;
}
// Rounds `val` down to the nearest multiple of `align`.
// Like VmaAlignUp, valid for any positive `align`, not only powers of two.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T units = val / align;
    return units * align;
}
// Integer division of x by y with rounding to nearest (ties round up),
// implemented by pre-adding half the divisor.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / static_cast<T>(2);
    return (x + halfDivisor) / y;
}
// Returns true when `x` is a power of two.
// NOTE(review): x == 0 also yields true here (0 has no bits set), matching
// the original behavior — callers are expected to pass nonzero values.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T masked = x & (x - 1);
    return masked == 0;
}
// Returns the smallest power of two greater than or equal to v.
// Standard bit-smearing idiom: decrement, propagate the highest set bit into
// all lower positions, then increment.
// NOTE(review): v == 0 wraps to 0 — callers must pass nonzero values.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// 64-bit overload of VmaNextPow2: smallest power of two >= v.
// Same bit-smearing idiom as the 32-bit version, with one extra step to
// cover the upper 32 bits.
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns the largest power of two less than or equal to v.
// Smears the highest set bit downward, then isolates it with v ^ (v >> 1).
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
// 64-bit overload of VmaPrevPow2: largest power of two <= v.
// Identical technique to the 32-bit version with one extra smear step.
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
// Returns true for a null pointer or a zero-length C string.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == nullptr)
    {
        return true;
    }
    return *pStr == '\0';
}
3913 #if VMA_STATS_STRING_ENABLED
3915 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3931 #endif // #if VMA_STATS_STRING_ENABLED
/*
Lomuto-style partition step for VmaQuickSort.
Partitions [beg, end) around the last element (the pivot): elements for
which cmp(elem, pivot) holds are moved before it. Returns an iterator to
the pivot's final position.
*/
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator store = beg;
    for(Iterator cur = beg; cur < pivot; ++cur)
    {
        if(cmp(*cur, *pivot))
        {
            if(store != cur)
            {
                std::swap(*cur, *store);
            }
            ++store;
        }
    }
    if(store != pivot)
    {
        std::swap(*store, *pivot);
    }
    return store;
}
3958 template<
typename Iterator,
typename Compare>
3959 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3963 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3964 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3965 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3969 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)
3971 #endif // #ifndef VMA_SORT
3980 static inline bool VmaBlocksOnSamePage(
3981 VkDeviceSize resourceAOffset,
3982 VkDeviceSize resourceASize,
3983 VkDeviceSize resourceBOffset,
3984 VkDeviceSize pageSize)
3986 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3987 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3988 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3989 VkDeviceSize resourceBStart = resourceBOffset;
3990 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3991 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation within a memory block.
// The numeric ordering is significant: VmaIsBufferImageGranularityConflict
// normalizes its two arguments by comparing these values.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // allocated, resource kind unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
4011 static inline bool VmaIsBufferImageGranularityConflict(
4012 VmaSuballocationType suballocType1,
4013 VmaSuballocationType suballocType2)
4015 if(suballocType1 > suballocType2)
4017 VMA_SWAP(suballocType1, suballocType2);
4020 switch(suballocType1)
4022 case VMA_SUBALLOCATION_TYPE_FREE:
4024 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
4026 case VMA_SUBALLOCATION_TYPE_BUFFER:
4028 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4029 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4030 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
4032 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
4033 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
4034 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4035 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
4037 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
4038 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
4046 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
4048 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4049 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
4050 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4051 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
4053 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
4060 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
4062 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
4063 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
4064 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
4065 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
4067 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
4080 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
4082 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
4083 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
4084 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4085 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
4091 VMA_CLASS_NO_COPY(VmaMutexLock)
4093 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
4094 m_pMutex(useMutex ? &mutex : VMA_NULL)
4095 {
if(m_pMutex) { m_pMutex->Lock(); } }
4097 {
if(m_pMutex) { m_pMutex->Unlock(); } }
4099 VMA_MUTEX* m_pMutex;
4103 struct VmaMutexLockRead
4105 VMA_CLASS_NO_COPY(VmaMutexLockRead)
4107 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
4108 m_pMutex(useMutex ? &mutex : VMA_NULL)
4109 {
if(m_pMutex) { m_pMutex->LockRead(); } }
4110 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
4112 VMA_RW_MUTEX* m_pMutex;
4116 struct VmaMutexLockWrite
4118 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
4120 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
4121 m_pMutex(useMutex ? &mutex : VMA_NULL)
4122 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
4123 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
4125 VMA_RW_MUTEX* m_pMutex;
4128 #if VMA_DEBUG_GLOBAL_MUTEX
4129 static VMA_MUTEX gDebugGlobalMutex;
4130 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
4132 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
4136 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element for which cmp(element, key) is false (std::lower_bound
semantics), or `end` when every element compares less than `key`.
`cmp` must be the same strict weak ordering the range is sorted by.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t lo = 0;
    size_t hi = (size_t)(end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;   // everything up to mid is < key
        }
        else
        {
            hi = mid;       // mid is a candidate answer
        }
    }
    return beg + lo;
}
4166 template<
typename CmpLess,
typename IterT,
typename KeyT>
4167 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
4169 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4170 beg, end, value, cmp);
4172 (!cmp(*it, value) && !cmp(value, *it)))
/*
Returns true when all `count` pointers in `arr` are non-null and pairwise
distinct. O(count^2), intended only for small arrays on validation paths.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T item = arr[i];
        if(item == nullptr)
        {
            return false;
        }
        // Reject duplicates among the remaining entries.
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(item == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
4208 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
4210 if((pAllocationCallbacks != VMA_NULL) &&
4211 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
4213 return (*pAllocationCallbacks->pfnAllocation)(
4214 pAllocationCallbacks->pUserData,
4217 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
4221 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
4225 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
4227 if((pAllocationCallbacks != VMA_NULL) &&
4228 (pAllocationCallbacks->pfnFree != VMA_NULL))
4230 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
4234 VMA_SYSTEM_FREE(ptr);
4238 template<
typename T>
4239 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
4241 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
4244 template<
typename T>
4245 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4247 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4250 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)
4252 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)
4254 template<
typename T>
4255 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4258 VmaFree(pAllocationCallbacks, ptr);
4261 template<
typename T>
4262 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4266 for(
size_t i = count; i--; )
4270 VmaFree(pAllocationCallbacks, ptr);
4274 static char* VmaCreateStringCopy(
const VkAllocationCallbacks* allocs,
const char* srcStr)
4276 if(srcStr != VMA_NULL)
4278 const size_t len = strlen(srcStr);
4279 char*
const result = vma_new_array(allocs,
char, len + 1);
4280 memcpy(result, srcStr, len + 1);
4289 static void VmaFreeString(
const VkAllocationCallbacks* allocs,
char* str)
4293 const size_t len = strlen(str);
4294 vma_delete_array(allocs, str, len + 1);
4299 template<
typename T>
4300 class VmaStlAllocator
4303 const VkAllocationCallbacks*
const m_pCallbacks;
4304 typedef T value_type;
4306 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4307 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4309 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4310 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4312 template<
typename U>
4313 bool operator==(
const VmaStlAllocator<U>& rhs)
const
4315 return m_pCallbacks == rhs.m_pCallbacks;
4317 template<
typename U>
4318 bool operator!=(
const VmaStlAllocator<U>& rhs)
const
4320 return m_pCallbacks != rhs.m_pCallbacks;
4323 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4326 #if VMA_USE_STL_VECTOR
4328 #define VmaVector std::vector
4330 template<
typename T,
typename allocatorT>
4331 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4333 vec.insert(vec.begin() + index, item);
4336 template<
typename T,
typename allocatorT>
4337 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
4339 vec.erase(vec.begin() + index);
4342 #else // #if VMA_USE_STL_VECTOR
4347 template<
typename T,
typename AllocatorT>
4351 typedef T value_type;
4353 VmaVector(
const AllocatorT& allocator) :
4354 m_Allocator(allocator),
4361 VmaVector(
size_t count,
const AllocatorT& allocator) :
4362 m_Allocator(allocator),
4363 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4371 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
4372 : VmaVector(count, allocator) {}
4374 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4375 m_Allocator(src.m_Allocator),
4376 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4377 m_Count(src.m_Count),
4378 m_Capacity(src.m_Count)
4382 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4388 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4391 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4395 resize(rhs.m_Count);
4398 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4404 bool empty()
const {
return m_Count == 0; }
4405 size_t size()
const {
return m_Count; }
4406 T* data() {
return m_pArray; }
4407 const T* data()
const {
return m_pArray; }
4409 T& operator[](
size_t index)
4411 VMA_HEAVY_ASSERT(index < m_Count);
4412 return m_pArray[index];
4414 const T& operator[](
size_t index)
const
4416 VMA_HEAVY_ASSERT(index < m_Count);
4417 return m_pArray[index];
4422 VMA_HEAVY_ASSERT(m_Count > 0);
4425 const T& front()
const
4427 VMA_HEAVY_ASSERT(m_Count > 0);
4432 VMA_HEAVY_ASSERT(m_Count > 0);
4433 return m_pArray[m_Count - 1];
4435 const T& back()
const
4437 VMA_HEAVY_ASSERT(m_Count > 0);
4438 return m_pArray[m_Count - 1];
4441 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4443 newCapacity = VMA_MAX(newCapacity, m_Count);
4445 if((newCapacity < m_Capacity) && !freeMemory)
4447 newCapacity = m_Capacity;
4450 if(newCapacity != m_Capacity)
4452 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4455 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4457 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4458 m_Capacity = newCapacity;
4459 m_pArray = newArray;
4463 void resize(
size_t newCount,
bool freeMemory =
false)
4465 size_t newCapacity = m_Capacity;
4466 if(newCount > m_Capacity)
4468 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4472 newCapacity = newCount;
4475 if(newCapacity != m_Capacity)
4477 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4478 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4479 if(elementsToCopy != 0)
4481 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4483 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4484 m_Capacity = newCapacity;
4485 m_pArray = newArray;
4491 void clear(
bool freeMemory =
false)
4493 resize(0, freeMemory);
4496 void insert(
size_t index,
const T& src)
4498 VMA_HEAVY_ASSERT(index <= m_Count);
4499 const size_t oldCount = size();
4500 resize(oldCount + 1);
4501 if(index < oldCount)
4503 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4505 m_pArray[index] = src;
4508 void remove(
size_t index)
4510 VMA_HEAVY_ASSERT(index < m_Count);
4511 const size_t oldCount = size();
4512 if(index < oldCount - 1)
4514 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4516 resize(oldCount - 1);
4519 void push_back(
const T& src)
4521 const size_t newIndex = size();
4522 resize(newIndex + 1);
4523 m_pArray[newIndex] = src;
4528 VMA_HEAVY_ASSERT(m_Count > 0);
4532 void push_front(
const T& src)
4539 VMA_HEAVY_ASSERT(m_Count > 0);
4543 typedef T* iterator;
4545 iterator begin() {
return m_pArray; }
4546 iterator end() {
return m_pArray + m_Count; }
4549 AllocatorT m_Allocator;
4555 template<
typename T,
typename allocatorT>
4556 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4558 vec.insert(index, item);
4561 template<
typename T,
typename allocatorT>
4562 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4567 #endif // #if VMA_USE_STL_VECTOR
4569 template<
typename CmpLess,
typename VectorT>
4570 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4572 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4574 vector.data() + vector.size(),
4576 CmpLess()) - vector.data();
4577 VmaVectorInsert(vector, indexToInsert, value);
4578 return indexToInsert;
4581 template<
typename CmpLess,
typename VectorT>
4582 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4585 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4590 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4592 size_t indexToRemove = it - vector.begin();
4593 VmaVectorRemove(vector, indexToRemove);
4607 template<
typename T>
4608 class VmaPoolAllocator
4610 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4612 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4613 ~VmaPoolAllocator();
4620 uint32_t NextFreeIndex;
4621 alignas(T)
char Value[
sizeof(T)];
4628 uint32_t FirstFreeIndex;
4631 const VkAllocationCallbacks* m_pAllocationCallbacks;
4632 const uint32_t m_FirstBlockCapacity;
4633 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4635 ItemBlock& CreateNewBlock();
4638 template<
typename T>
4639 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4640 m_pAllocationCallbacks(pAllocationCallbacks),
4641 m_FirstBlockCapacity(firstBlockCapacity),
4642 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4644 VMA_ASSERT(m_FirstBlockCapacity > 1);
4647 template<
typename T>
4648 VmaPoolAllocator<T>::~VmaPoolAllocator()
4650 for(
size_t i = m_ItemBlocks.size(); i--; )
4651 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4652 m_ItemBlocks.clear();
4655 template<
typename T>
4656 T* VmaPoolAllocator<T>::Alloc()
4658 for(
size_t i = m_ItemBlocks.size(); i--; )
4660 ItemBlock& block = m_ItemBlocks[i];
4662 if(block.FirstFreeIndex != UINT32_MAX)
4664 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4665 block.FirstFreeIndex = pItem->NextFreeIndex;
4666 T* result = (T*)&pItem->Value;
4673 ItemBlock& newBlock = CreateNewBlock();
4674 Item*
const pItem = &newBlock.pItems[0];
4675 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4676 T* result = (T*)&pItem->Value;
4681 template<
typename T>
4682 void VmaPoolAllocator<T>::Free(T* ptr)
4685 for(
size_t i = m_ItemBlocks.size(); i--; )
4687 ItemBlock& block = m_ItemBlocks[i];
4691 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4694 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4697 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4698 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4699 block.FirstFreeIndex = index;
4703 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4706 template<
typename T>
4707 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4709 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4710 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4712 const ItemBlock newBlock = {
4713 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4717 m_ItemBlocks.push_back(newBlock);
4720 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4721 newBlock.pItems[i].NextFreeIndex = i + 1;
4722 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4723 return m_ItemBlocks.back();
4729 #if VMA_USE_STL_LIST
4731 #define VmaList std::list
4733 #else // #if VMA_USE_STL_LIST
4735 template<
typename T>
4744 template<
typename T>
4747 VMA_CLASS_NO_COPY(VmaRawList)
4749 typedef VmaListItem<T> ItemType;
4751 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4755 size_t GetCount()
const {
return m_Count; }
4756 bool IsEmpty()
const {
return m_Count == 0; }
4758 ItemType* Front() {
return m_pFront; }
4759 const ItemType* Front()
const {
return m_pFront; }
4760 ItemType* Back() {
return m_pBack; }
4761 const ItemType* Back()
const {
return m_pBack; }
4763 ItemType* PushBack();
4764 ItemType* PushFront();
4765 ItemType* PushBack(
const T& value);
4766 ItemType* PushFront(
const T& value);
4771 ItemType* InsertBefore(ItemType* pItem);
4773 ItemType* InsertAfter(ItemType* pItem);
4775 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4776 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4778 void Remove(ItemType* pItem);
4781 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4782 VmaPoolAllocator<ItemType> m_ItemAllocator;
4788 template<
typename T>
4789 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4790 m_pAllocationCallbacks(pAllocationCallbacks),
4791 m_ItemAllocator(pAllocationCallbacks, 128),
4798 template<
typename T>
4799 VmaRawList<T>::~VmaRawList()
4805 template<
typename T>
4806 void VmaRawList<T>::Clear()
4808 if(IsEmpty() ==
false)
4810 ItemType* pItem = m_pBack;
4811 while(pItem != VMA_NULL)
4813 ItemType*
const pPrevItem = pItem->pPrev;
4814 m_ItemAllocator.Free(pItem);
4817 m_pFront = VMA_NULL;
4823 template<
typename T>
4824 VmaListItem<T>* VmaRawList<T>::PushBack()
4826 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4827 pNewItem->pNext = VMA_NULL;
4830 pNewItem->pPrev = VMA_NULL;
4831 m_pFront = pNewItem;
4837 pNewItem->pPrev = m_pBack;
4838 m_pBack->pNext = pNewItem;
4845 template<
typename T>
4846 VmaListItem<T>* VmaRawList<T>::PushFront()
4848 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4849 pNewItem->pPrev = VMA_NULL;
4852 pNewItem->pNext = VMA_NULL;
4853 m_pFront = pNewItem;
4859 pNewItem->pNext = m_pFront;
4860 m_pFront->pPrev = pNewItem;
4861 m_pFront = pNewItem;
4867 template<
typename T>
4868 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4870 ItemType*
const pNewItem = PushBack();
4871 pNewItem->Value = value;
4875 template<
typename T>
4876 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4878 ItemType*
const pNewItem = PushFront();
4879 pNewItem->Value = value;
4883 template<
typename T>
4884 void VmaRawList<T>::PopBack()
4886 VMA_HEAVY_ASSERT(m_Count > 0);
4887 ItemType*
const pBackItem = m_pBack;
4888 ItemType*
const pPrevItem = pBackItem->pPrev;
4889 if(pPrevItem != VMA_NULL)
4891 pPrevItem->pNext = VMA_NULL;
4893 m_pBack = pPrevItem;
4894 m_ItemAllocator.Free(pBackItem);
4898 template<
typename T>
4899 void VmaRawList<T>::PopFront()
4901 VMA_HEAVY_ASSERT(m_Count > 0);
4902 ItemType*
const pFrontItem = m_pFront;
4903 ItemType*
const pNextItem = pFrontItem->pNext;
4904 if(pNextItem != VMA_NULL)
4906 pNextItem->pPrev = VMA_NULL;
4908 m_pFront = pNextItem;
4909 m_ItemAllocator.Free(pFrontItem);
4913 template<
typename T>
4914 void VmaRawList<T>::Remove(ItemType* pItem)
4916 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4917 VMA_HEAVY_ASSERT(m_Count > 0);
4919 if(pItem->pPrev != VMA_NULL)
4921 pItem->pPrev->pNext = pItem->pNext;
4925 VMA_HEAVY_ASSERT(m_pFront == pItem);
4926 m_pFront = pItem->pNext;
4929 if(pItem->pNext != VMA_NULL)
4931 pItem->pNext->pPrev = pItem->pPrev;
4935 VMA_HEAVY_ASSERT(m_pBack == pItem);
4936 m_pBack = pItem->pPrev;
4939 m_ItemAllocator.Free(pItem);
4943 template<
typename T>
4944 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4946 if(pItem != VMA_NULL)
4948 ItemType*
const prevItem = pItem->pPrev;
4949 ItemType*
const newItem = m_ItemAllocator.Alloc();
4950 newItem->pPrev = prevItem;
4951 newItem->pNext = pItem;
4952 pItem->pPrev = newItem;
4953 if(prevItem != VMA_NULL)
4955 prevItem->pNext = newItem;
4959 VMA_HEAVY_ASSERT(m_pFront == pItem);
4969 template<
typename T>
4970 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4972 if(pItem != VMA_NULL)
4974 ItemType*
const nextItem = pItem->pNext;
4975 ItemType*
const newItem = m_ItemAllocator.Alloc();
4976 newItem->pNext = nextItem;
4977 newItem->pPrev = pItem;
4978 pItem->pNext = newItem;
4979 if(nextItem != VMA_NULL)
4981 nextItem->pPrev = newItem;
4985 VMA_HEAVY_ASSERT(m_pBack == pItem);
4995 template<
typename T>
4996 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4998 ItemType*
const newItem = InsertBefore(pItem);
4999 newItem->Value = value;
5003 template<
typename T>
5004 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
5006 ItemType*
const newItem = InsertAfter(pItem);
5007 newItem->Value = value;
// VmaList<T, AllocatorT>: a thin std::list-like wrapper around VmaRawList<T>,
// providing iterator / const_iterator and the subset of the STL list API the
// library needs (begin/end, push_back, insert, erase, clear, size, empty).
// Used when VMA_USE_STL_LIST is not defined (see #endif at the bottom).
// NOTE(review): fragmentary extraction — class/access-specifier lines and
// several returns are missing from view; code kept byte-identical.
5011 template<
typename T,
typename AllocatorT>
5014 VMA_CLASS_NO_COPY(VmaList)
// --- iterator: mutable bidirectional iterator over the raw list ---
5025 T& operator*()
const
5027 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5028 return m_pItem->Value;
5030 T* operator->()
const
5032 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5033 return &m_pItem->Value;
5036 iterator& operator++()
5038 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5039 m_pItem = m_pItem->pNext;
// Decrementing end() (m_pItem == VMA_NULL) steps to the list back.
5042 iterator& operator--()
5044 if(m_pItem != VMA_NULL)
5046 m_pItem = m_pItem->pPrev;
5050 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5051 m_pItem = m_pList->Back();
// Post-increment / post-decrement return a copy of the prior position.
5056 iterator operator++(
int)
5058 iterator result = *
this;
5062 iterator operator--(
int)
5064 iterator result = *
this;
// Comparing iterators from different lists is a logic error (asserted).
5069 bool operator==(
const iterator& rhs)
const
5071 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5072 return m_pItem == rhs.m_pItem;
5074 bool operator!=(
const iterator& rhs)
const
5076 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5077 return m_pItem != rhs.m_pItem;
5081 VmaRawList<T>* m_pList;
5082 VmaListItem<T>* m_pItem;
5084 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
5090 friend class VmaList<T, AllocatorT>;
// --- const_iterator: read-only counterpart, convertible from iterator ---
5093 class const_iterator
5102 const_iterator(
const iterator& src) :
5103 m_pList(src.m_pList),
5104 m_pItem(src.m_pItem)
5108 const T& operator*()
const
5110 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5111 return m_pItem->Value;
5113 const T* operator->()
const
5115 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5116 return &m_pItem->Value;
5119 const_iterator& operator++()
5121 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
5122 m_pItem = m_pItem->pNext;
5125 const_iterator& operator--()
5127 if(m_pItem != VMA_NULL)
5129 m_pItem = m_pItem->pPrev;
5133 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
5134 m_pItem = m_pList->Back();
5139 const_iterator operator++(
int)
5141 const_iterator result = *
this;
5145 const_iterator operator--(
int)
5147 const_iterator result = *
this;
5152 bool operator==(
const const_iterator& rhs)
const
5154 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5155 return m_pItem == rhs.m_pItem;
5157 bool operator!=(
const const_iterator& rhs)
const
5159 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
5160 return m_pItem != rhs.m_pItem;
5164 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
5170 const VmaRawList<T>* m_pList;
5171 const VmaListItem<T>* m_pItem;
5173 friend class VmaList<T, AllocatorT>;
// --- VmaList public API: forwards to the underlying VmaRawList ---
5176 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
5178 bool empty()
const {
return m_RawList.IsEmpty(); }
5179 size_t size()
const {
return m_RawList.GetCount(); }
5181 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
5182 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
5184 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
5185 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
5187 void clear() { m_RawList.Clear(); }
5188 void push_back(
const T& value) { m_RawList.PushBack(value); }
5189 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
// insert(): inserts before `it`, matching std::list::insert semantics.
5190 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
5193 VmaRawList<T> m_RawList;
5196 #endif // #if VMA_USE_STL_LIST
// Map abstraction: when VMA_USE_STL_UNORDERED_MAP is set, VmaPair/VMA_MAP_TYPE
// alias std::pair / std::unordered_map (with the library's STL allocator);
// otherwise the hand-rolled VmaPair/VmaMap below are used.
5204 #if VMA_USE_STL_UNORDERED_MAP
5206 #define VmaPair std::pair
5208 #define VMA_MAP_TYPE(KeyT, ValueT) \
5209 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >
5211 #else // #if VMA_USE_STL_UNORDERED_MAP
// Minimal std::pair replacement holding `first` and `second`.
5213 template<
typename T1,
typename T2>
5219 VmaPair() : first(), second() { }
5220 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// VmaMap<KeyT, ValueT>: associative container backed by a sorted VmaVector of
// VmaPair entries; insert/find use binary search (see definitions below).
// Iterators are plain pointers into the vector.
5226 template<
typename KeyT,
typename ValueT>
5230 typedef VmaPair<KeyT, ValueT> PairType;
5231 typedef PairType* iterator;
5233 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
5235 iterator begin() {
return m_Vector.begin(); }
5236 iterator end() {
return m_Vector.end(); }
5238 void insert(
const PairType& pair);
5239 iterator find(
const KeyT& key);
5240 void erase(iterator it);
5243 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
5246 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
// Comparator ordering VmaPair entries by `first` only; the second overload
// allows heterogeneous lookup against a bare key (used by binary search in
// VmaMap::insert/find below).
5248 template<
typename FirstT,
typename SecondT>
5249 struct VmaPairFirstLess
5251 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const
5253 return lhs.first < rhs.first;
5255 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const
5257 return lhs.first < rhsFirst;
// VmaMap::insert: binary-searches for the first element not less than `pair`
// and inserts at that index, keeping m_Vector sorted by key.
// NOTE(review): extraction dropped the argument lines between 5264 and 5268
// (presumably m_Vector.data() and `pair`); code kept byte-identical.
5261 template<
typename KeyT,
typename ValueT>
5262 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
5264 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
5266 m_Vector.data() + m_Vector.size(),
5268 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
5269 VmaVectorInsert(m_Vector, indexToInsert, pair);
// VmaMap::find: binary search for `key`; returns an iterator to the matching
// element, or end() when the lower-bound position does not hold an equal key.
5272 template<
typename KeyT,
typename ValueT>
5273 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
5275 PairType* it = VmaBinaryFindFirstNotLess(
5277 m_Vector.data() + m_Vector.size(),
5279 VmaPairFirstLess<KeyT, ValueT>());
5280 if((it != m_Vector.end()) && (it->first == key))
5286 return m_Vector.end();
// VmaMap::erase: removes the element `it` points to by index in the backing
// vector (iterator is a raw pointer, so the index is a pointer difference).
5290 template<
typename KeyT,
typename ValueT>
5291 void VmaMap<KeyT, ValueT>::erase(iterator it)
5293 VmaVectorRemove(m_Vector, it - m_Vector.begin());
5296 #endif // #if VMA_USE_STL_UNORDERED_MAP
5302 class VmaDeviceMemoryBlock;
5304 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// VmaAllocation_T: internal representation of a single allocation. It is
// either a sub-allocation inside a VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK,
// union member m_BlockAllocation) or a dedicated VkDeviceMemory
// (ALLOCATION_TYPE_DEDICATED, union member m_DedicatedAllocation).
// NOTE(review): fragmentary extraction — access specifiers, some members and
// closing braces are missing from view; code kept byte-identical.
5306 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation; the low
// 7 bits count explicit Map() calls.
5309 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
5313 FLAG_USER_DATA_STRING = 0x01,
5317 enum ALLOCATION_TYPE
5319 ALLOCATION_TYPE_NONE,
5320 ALLOCATION_TYPE_BLOCK,
5321 ALLOCATION_TYPE_DEDICATED,
// Two-phase init: Ctor() resets fields to the "no allocation yet" state.
5328 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
5332 m_MemoryTypeIndex = 0;
5333 m_pUserData = VMA_NULL;
5334 m_LastUseFrameIndex = currentFrameIndex;
5335 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
5336 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
5338 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
5340 #if VMA_STATS_STRING_ENABLED
5341 m_CreationFrameIndex = currentFrameIndex;
5342 m_BufferImageUsage = 0;
// Destructor-time sanity checks: allocation must be unmapped and its user
// data already freed.
5348 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5351 VMA_ASSERT(m_pUserData == VMA_NULL);
// Initializes this object as a block sub-allocation.
5354 void InitBlockAllocation(
5355 VmaDeviceMemoryBlock* block,
5356 VkDeviceSize offset,
5357 VkDeviceSize alignment,
5359 uint32_t memoryTypeIndex,
5360 VmaSuballocationType suballocationType,
5364 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5365 VMA_ASSERT(block != VMA_NULL);
5366 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5367 m_Alignment = alignment;
5369 m_MemoryTypeIndex = memoryTypeIndex;
5370 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5371 m_SuballocationType = (uint8_t)suballocationType;
5372 m_BlockAllocation.m_Block = block;
5373 m_BlockAllocation.m_Offset = offset;
5374 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Initializes this object as an already-lost block allocation (frame index
// must equal VMA_FRAME_INDEX_LOST beforehand).
5379 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5380 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5381 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5382 m_MemoryTypeIndex = 0;
5383 m_BlockAllocation.m_Block = VMA_NULL;
5384 m_BlockAllocation.m_Offset = 0;
5385 m_BlockAllocation.m_CanBecomeLost =
true;
5388 void ChangeBlockAllocation(
5390 VmaDeviceMemoryBlock* block,
5391 VkDeviceSize offset);
5393 void ChangeOffset(VkDeviceSize newOffset);
// Initializes this object as a dedicated VkDeviceMemory allocation.
5396 void InitDedicatedAllocation(
5397 uint32_t memoryTypeIndex,
5398 VkDeviceMemory hMemory,
5399 VmaSuballocationType suballocationType,
5403 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5404 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5405 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5408 m_MemoryTypeIndex = memoryTypeIndex;
5409 m_SuballocationType = (uint8_t)suballocationType;
5410 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5411 m_DedicatedAllocation.m_hMemory = hMemory;
5412 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- simple accessors ---
5415 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5416 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5417 VkDeviceSize GetSize()
const {
return m_Size; }
5418 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5419 void* GetUserData()
const {
return m_pUserData; }
5420 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5421 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for block allocations (asserted).
5423 VmaDeviceMemoryBlock* GetBlock()
const
5425 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5426 return m_BlockAllocation.m_Block;
5428 VkDeviceSize GetOffset()
const;
5429 VkDeviceMemory GetMemory()
const;
5430 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5431 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5432 void* GetMappedData()
const;
5433 bool CanBecomeLost()
const;
// Lost-allocation machinery: m_LastUseFrameIndex is atomic so touch/make-lost
// can race with other threads (weak CAS may fail spuriously; callers loop).
5435 uint32_t GetLastUseFrameIndex()
const
5437 return m_LastUseFrameIndex.load();
5439 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5441 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5451 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Fills outInfo with statistics for a dedicated allocation (asserted type).
5453 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5455 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5466 void BlockAllocMap();
5467 void BlockAllocUnmap();
5468 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5471 #if VMA_STATS_STRING_ENABLED
5472 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5473 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
// Records buffer/image usage flags once, for stats output only.
5475 void InitBufferImageUsage(uint32_t bufferImageUsage)
5477 VMA_ASSERT(m_BufferImageUsage == 0);
5478 m_BufferImageUsage = bufferImageUsage;
5481 void PrintParameters(
class VmaJsonWriter& json)
const;
// --- data members ---
5485 VkDeviceSize m_Alignment;
5486 VkDeviceSize m_Size;
5488 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5489 uint32_t m_MemoryTypeIndex;
5491 uint8_t m_SuballocationType;
// Allocation out of a larger device-memory block.
5498 struct BlockAllocation
5500 VmaDeviceMemoryBlock* m_Block;
5501 VkDeviceSize m_Offset;
5502 bool m_CanBecomeLost;
// Allocation for an object that has its own private VkDeviceMemory.
5506 struct DedicatedAllocation
5508 VkDeviceMemory m_hMemory;
5509 void* m_pMappedData;
5515 BlockAllocation m_BlockAllocation;
5517 DedicatedAllocation m_DedicatedAllocation;
5520 #if VMA_STATS_STRING_ENABLED
5521 uint32_t m_CreationFrameIndex;
5522 uint32_t m_BufferImageUsage;
// VmaSuballocation: one region (used or free) inside a device-memory block,
// plus comparators ordering suballocations by ascending/descending offset.
5532 struct VmaSuballocation
5534 VkDeviceSize offset;
5537 VmaSuballocationType type;
5541 struct VmaSuballocationOffsetLess
5543 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
5545 return lhs.offset < rhs.offset;
5548 struct VmaSuballocationOffsetGreater
5550 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const
5552 return lhs.offset > rhs.offset;
// Doubly-linked list of suballocations, used by the generic block metadata.
5556 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost model for making allocations lost: each allocation that would have to
// be made lost adds this fixed penalty on top of its size.
5559 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5561 enum class VmaAllocationRequestType
// VmaAllocationRequest: the result of a successful CreateAllocationRequest —
// where the allocation would go and what it would cost.
5583 struct VmaAllocationRequest
5585 VkDeviceSize offset;
5586 VkDeviceSize sumFreeSize;
5587 VkDeviceSize sumItemSize;
5588 VmaSuballocationList::iterator item;
5589 size_t itemsToMakeLostCount;
5591 VmaAllocationRequestType type;
// Lower cost = better candidate: bytes of existing allocations that would be
// lost, plus a fixed penalty per lost allocation.
5593 VkDeviceSize CalcCost()
const
5595 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: abstract base class describing how the space inside a
// single VkDeviceMemory block is partitioned. Concrete strategies follow:
// _Generic (free list), _Linear (ring/stack), _Buddy (power-of-two tree).
// NOTE(review): fragmentary extraction — several pure-virtual declarations and
// braces are missing from view; code kept byte-identical.
5603 class VmaBlockMetadata
5607 virtual ~VmaBlockMetadata() { }
5608 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validation and statistics interface.
5611 virtual bool Validate()
const = 0;
5612 VkDeviceSize GetSize()
const {
return m_Size; }
5613 virtual size_t GetAllocationCount()
const = 0;
5614 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5615 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5617 virtual bool IsEmpty()
const = 0;
5619 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5621 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5623 #if VMA_STATS_STRING_ENABLED
5624 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Tries to find a place for an allocation; fills *pAllocationRequest and
// returns true on success (does not modify the block yet).
5630 virtual bool CreateAllocationRequest(
5631 uint32_t currentFrameIndex,
5632 uint32_t frameInUseCount,
5633 VkDeviceSize bufferImageGranularity,
5634 VkDeviceSize allocSize,
5635 VkDeviceSize allocAlignment,
5637 VmaSuballocationType allocType,
5638 bool canMakeOtherLost,
5641 VmaAllocationRequest* pAllocationRequest) = 0;
5643 virtual bool MakeRequestedAllocationsLost(
5644 uint32_t currentFrameIndex,
5645 uint32_t frameInUseCount,
5646 VmaAllocationRequest* pAllocationRequest) = 0;
5648 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5650 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Commits a previously created request (Alloc) / frees at a given offset.
5654 const VmaAllocationRequest& request,
5655 VmaSuballocationType type,
5656 VkDeviceSize allocSize,
5661 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5664 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Shared JSON-printing helpers for derived classes' PrintDetailedMap.
5666 #if VMA_STATS_STRING_ENABLED
5667 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5668 VkDeviceSize unusedBytes,
5669 size_t allocationCount,
5670 size_t unusedRangeCount)
const;
5671 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5672 VkDeviceSize offset,
5674 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5675 VkDeviceSize offset,
5676 VkDeviceSize size)
const;
5677 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5681 VkDeviceSize m_Size;
5682 const VkAllocationCallbacks* m_pAllocationCallbacks;
// Validation helper: asserts and (in the dropped tail of this macro) bails
// out when `cond` is false.
5685 #define VMA_VALIDATE(cond) do { if(!(cond)) { \
5686 VMA_ASSERT(0 && "Validation failed: " #cond); \
// VmaBlockMetadata_Generic: default metadata strategy — keeps all
// suballocations (used and free) in an offset-ordered list, plus a vector of
// free suballocations sorted by size for best-fit searches.
// NOTE(review): fragmentary extraction; code kept byte-identical.
5690 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5692 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5695 virtual ~VmaBlockMetadata_Generic();
5696 virtual void Init(VkDeviceSize size);
5698 virtual bool Validate()
const;
// Total entries minus free entries = live allocations.
5699 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5700 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5701 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5702 virtual bool IsEmpty()
const;
5704 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5705 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5707 #if VMA_STATS_STRING_ENABLED
5708 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5711 virtual bool CreateAllocationRequest(
5712 uint32_t currentFrameIndex,
5713 uint32_t frameInUseCount,
5714 VkDeviceSize bufferImageGranularity,
5715 VkDeviceSize allocSize,
5716 VkDeviceSize allocAlignment,
5718 VmaSuballocationType allocType,
5719 bool canMakeOtherLost,
5721 VmaAllocationRequest* pAllocationRequest);
5723 virtual bool MakeRequestedAllocationsLost(
5724 uint32_t currentFrameIndex,
5725 uint32_t frameInUseCount,
5726 VmaAllocationRequest* pAllocationRequest);
5728 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5730 virtual VkResult CheckCorruption(
const void* pBlockData);
5733 const VmaAllocationRequest& request,
5734 VmaSuballocationType type,
5735 VkDeviceSize allocSize,
5739 virtual void FreeAtOffset(VkDeviceSize offset);
// Quick check used by defragmentation: could bufferImageGranularity force
// padding between neighbouring suballocations of differing type?
5744 bool IsBufferImageGranularityConflictPossible(
5745 VkDeviceSize bufferImageGranularity,
5746 VmaSuballocationType& inOutPrevSuballocType)
const;
5749 friend class VmaDefragmentationAlgorithm_Generic;
5750 friend class VmaDefragmentationAlgorithm_Fast;
5752 uint32_t m_FreeCount;
5753 VkDeviceSize m_SumFreeSize;
5754 VmaSuballocationList m_Suballocations;
// Free suballocations sorted by size ascending, for best-fit lookup.
5757 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5759 bool ValidateFreeSuballocationList()
const;
// Core fit test: can `allocSize` bytes at `allocAlignment` go at/after
// `suballocItem`, possibly making other allocations lost?
5763 bool CheckAllocation(
5764 uint32_t currentFrameIndex,
5765 uint32_t frameInUseCount,
5766 VkDeviceSize bufferImageGranularity,
5767 VkDeviceSize allocSize,
5768 VkDeviceSize allocAlignment,
5769 VmaSuballocationType allocType,
5770 VmaSuballocationList::const_iterator suballocItem,
5771 bool canMakeOtherLost,
5772 VkDeviceSize* pOffset,
5773 size_t* itemsToMakeLostCount,
5774 VkDeviceSize* pSumFreeSize,
5775 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
5777 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5781 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5784 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5787 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaBlockMetadata_Linear: metadata strategy for linear pools — two
// suballocation vectors that act as a ring buffer or a double stack depending
// on m_2ndVectorMode. Which physical vector is "1st" flips via
// m_1stVectorIndex.
// NOTE(review): fragmentary extraction; code kept byte-identical.
5868 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5870 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5873 virtual ~VmaBlockMetadata_Linear();
5874 virtual void Init(VkDeviceSize size);
5876 virtual bool Validate()
const;
5877 virtual size_t GetAllocationCount()
const;
5878 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5879 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5880 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5882 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5883 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5885 #if VMA_STATS_STRING_ENABLED
5886 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5889 virtual bool CreateAllocationRequest(
5890 uint32_t currentFrameIndex,
5891 uint32_t frameInUseCount,
5892 VkDeviceSize bufferImageGranularity,
5893 VkDeviceSize allocSize,
5894 VkDeviceSize allocAlignment,
5896 VmaSuballocationType allocType,
5897 bool canMakeOtherLost,
5899 VmaAllocationRequest* pAllocationRequest);
5901 virtual bool MakeRequestedAllocationsLost(
5902 uint32_t currentFrameIndex,
5903 uint32_t frameInUseCount,
5904 VmaAllocationRequest* pAllocationRequest);
5906 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5908 virtual VkResult CheckCorruption(
const void* pBlockData);
5911 const VmaAllocationRequest& request,
5912 VmaSuballocationType type,
5913 VkDeviceSize allocSize,
5917 virtual void FreeAtOffset(VkDeviceSize offset);
5927 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the 2nd vector is being used relative to the 1st.
5929 enum SECOND_VECTOR_MODE
5931 SECOND_VECTOR_EMPTY,
5936 SECOND_VECTOR_RING_BUFFER,
5942 SECOND_VECTOR_DOUBLE_STACK,
5945 VkDeviceSize m_SumFreeSize;
5946 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5947 uint32_t m_1stVectorIndex;
5948 SECOND_VECTOR_MODE m_2ndVectorMode;
// Accessors resolving which physical vector currently plays each role.
5950 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5951 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5952 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5953 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counters of null (freed) items kept in the vectors until compaction.
5956 size_t m_1stNullItemsBeginCount;
5958 size_t m_1stNullItemsMiddleCount;
5960 size_t m_2ndNullItemsCount;
5962 bool ShouldCompact1st()
const;
5963 void CleanupAfterFree();
// Request creation split by allocation direction (default vs upper address,
// i.e. the second stack of a double stack).
5965 bool CreateAllocationRequest_LowerAddress(
5966 uint32_t currentFrameIndex,
5967 uint32_t frameInUseCount,
5968 VkDeviceSize bufferImageGranularity,
5969 VkDeviceSize allocSize,
5970 VkDeviceSize allocAlignment,
5971 VmaSuballocationType allocType,
5972 bool canMakeOtherLost,
5974 VmaAllocationRequest* pAllocationRequest);
5975 bool CreateAllocationRequest_UpperAddress(
5976 uint32_t currentFrameIndex,
5977 uint32_t frameInUseCount,
5978 VkDeviceSize bufferImageGranularity,
5979 VkDeviceSize allocSize,
5980 VkDeviceSize allocAlignment,
5981 VmaSuballocationType allocType,
5982 bool canMakeOtherLost,
5984 VmaAllocationRequest* pAllocationRequest);
// VmaBlockMetadata_Buddy: buddy-allocator metadata strategy — a binary tree
// of nodes whose sizes halve per level; free nodes are kept in per-level free
// lists. Lost allocations are not supported (MakeAllocationsLost et al. are
// effectively no-ops / errors here).
// NOTE(review): fragmentary extraction — the Node struct body and several
// members are missing from view; code kept byte-identical.
5998 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
6000 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
6003 virtual ~VmaBlockMetadata_Buddy();
6004 virtual void Init(VkDeviceSize size);
6006 virtual bool Validate()
const;
6007 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Free space = tracked free bytes plus the tail the buddy tree cannot use.
6008 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
6009 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
6010 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
6012 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
6013 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
6015 #if VMA_STATS_STRING_ENABLED
6016 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
6019 virtual bool CreateAllocationRequest(
6020 uint32_t currentFrameIndex,
6021 uint32_t frameInUseCount,
6022 VkDeviceSize bufferImageGranularity,
6023 VkDeviceSize allocSize,
6024 VkDeviceSize allocAlignment,
6026 VmaSuballocationType allocType,
6027 bool canMakeOtherLost,
6029 VmaAllocationRequest* pAllocationRequest);
6031 virtual bool MakeRequestedAllocationsLost(
6032 uint32_t currentFrameIndex,
6033 uint32_t frameInUseCount,
6034 VmaAllocationRequest* pAllocationRequest);
6036 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not implemented for the buddy strategy.
6038 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
6041 const VmaAllocationRequest& request,
6042 VmaSuballocationType type,
6043 VkDeviceSize allocSize,
6046 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
6047 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
6050 static const VkDeviceSize MIN_NODE_SIZE = 32;
6051 static const size_t MAX_LEVELS = 30;
// Scratch counters accumulated while recursively validating the tree.
6053 struct ValidationContext
6055 size_t calculatedAllocationCount;
6056 size_t calculatedFreeCount;
6057 VkDeviceSize calculatedSumFreeSize;
6059 ValidationContext() :
6060 calculatedAllocationCount(0),
6061 calculatedFreeCount(0),
6062 calculatedSumFreeSize(0) { }
6067 VkDeviceSize offset;
6097 VkDeviceSize m_UsableSize;
6098 uint32_t m_LevelCount;
6104 } m_FreeList[MAX_LEVELS];
6106 size_t m_AllocationCount;
6110 VkDeviceSize m_SumFreeSize;
// Bytes past the largest power-of-two not coverable by the tree.
6112 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
6113 void DeleteNode(Node* node);
6114 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
6115 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Node size at `level` is the usable size halved `level` times.
6116 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
6118 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
6119 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
6123 void AddToFreeListFront(uint32_t level, Node* node);
6127 void RemoveFromFreeList(uint32_t level, Node* node);
6129 #if VMA_STATS_STRING_ENABLED
6130 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// VmaDeviceMemoryBlock: wraps one VkDeviceMemory handle together with the
// metadata object that partitions it, a mapping reference count, and binding
// helpers. Destruction asserts the block is unmapped and already released.
// NOTE(review): fragmentary extraction; code kept byte-identical.
6140 class VmaDeviceMemoryBlock
6142 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
6144 VmaBlockMetadata* m_pMetadata;
6148 ~VmaDeviceMemoryBlock()
6150 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
6151 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init parameters (full signature partially dropped by extraction).
6158 uint32_t newMemoryTypeIndex,
6159 VkDeviceMemory newMemory,
6160 VkDeviceSize newSize,
6162 uint32_t algorithm);
6166 VmaPool GetParentPool()
const {
return m_hParentPool; }
6167 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
6168 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6169 uint32_t GetId()
const {
return m_Id; }
6170 void* GetMappedData()
const {
return m_pMappedData; }
6173 bool Validate()
const;
// Map increments m_MapCount by `count`; memory is mapped once and shared.
6178 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection helpers writing/checking magic values around an
// allocation inside this block.
6181 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
6182 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
6184 VkResult BindBufferMemory(
6187 VkDeviceSize allocationLocalOffset,
6190 VkResult BindImageMemory(
6193 VkDeviceSize allocationLocalOffset,
6199 uint32_t m_MemoryTypeIndex;
6201 VkDeviceMemory m_hMemory;
// Map/Unmap reference counter and the shared mapped pointer.
6209 uint32_t m_MapCount;
6210 void* m_pMappedData;
// VmaPointerLess: orders raw pointers (comparison body dropped by the
// extraction). VmaDefragmentationMove: one planned move of an allocation
// from (srcBlockIndex, srcOffset) to (dstBlockIndex, dstOffset).
6213 struct VmaPointerLess
6215 bool operator()(
const void* lhs,
const void* rhs)
const
6221 struct VmaDefragmentationMove
6223 size_t srcBlockIndex;
6224 size_t dstBlockIndex;
6225 VkDeviceSize srcOffset;
6226 VkDeviceSize dstOffset;
6230 class VmaDefragmentationAlgorithm;
// VmaBlockVector: a growable sequence of VmaDeviceMemoryBlock for one memory
// type — the backing store of either a custom pool or a default pool. Guards
// m_Blocks with a read-write mutex and implements allocation, freeing,
// corruption checks and defragmentation over its blocks.
// NOTE(review): fragmentary extraction — constructor/Allocate signatures are
// partially dropped; code kept byte-identical.
6238 struct VmaBlockVector
6240 VMA_CLASS_NO_COPY(VmaBlockVector)
6245 uint32_t memoryTypeIndex,
6246 VkDeviceSize preferredBlockSize,
6247 size_t minBlockCount,
6248 size_t maxBlockCount,
6249 VkDeviceSize bufferImageGranularity,
6250 uint32_t frameInUseCount,
6251 bool explicitBlockSize,
6252 uint32_t algorithm);
6255 VkResult CreateMinBlocks();
6257 VmaAllocator GetAllocator()
const {
return m_hAllocator; }
6258 VmaPool GetParentPool()
const {
return m_hParentPool; }
// A null parent pool means this vector belongs to a default (implicit) pool.
6259 bool IsCustomPool()
const {
return m_hParentPool != VMA_NULL; }
6260 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
6261 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
6262 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
6263 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
6264 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
6268 bool IsEmpty()
const {
return m_Blocks.empty(); }
6269 bool IsCorruptionDetectionEnabled()
const;
// Allocate (signature partially dropped): allocationCount allocations.
6272 uint32_t currentFrameIndex,
6274 VkDeviceSize alignment,
6276 VmaSuballocationType suballocType,
6277 size_t allocationCount,
6285 #if VMA_STATS_STRING_ENABLED
6286 void PrintDetailedMap(
class VmaJsonWriter& json);
6289 void MakePoolAllocationsLost(
6290 uint32_t currentFrameIndex,
6291 size_t* pLostAllocationCount);
6292 VkResult CheckCorruption();
// Defragmentation entry points; budgets are in/out (decremented as used).
6296 class VmaBlockVectorDefragmentationContext* pCtx,
6298 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
6299 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
6300 VkCommandBuffer commandBuffer);
6301 void DefragmentationEnd(
6302 class VmaBlockVectorDefragmentationContext* pCtx,
6308 size_t GetBlockCount()
const {
return m_Blocks.size(); }
6309 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
6310 size_t CalcAllocationCount()
const;
6311 bool IsBufferImageGranularityConflictPossible()
const;
6314 friend class VmaDefragmentationAlgorithm_Generic;
// Immutable configuration captured at construction time.
6318 const uint32_t m_MemoryTypeIndex;
6319 const VkDeviceSize m_PreferredBlockSize;
6320 const size_t m_MinBlockCount;
6321 const size_t m_MaxBlockCount;
6322 const VkDeviceSize m_BufferImageGranularity;
6323 const uint32_t m_FrameInUseCount;
6324 const bool m_ExplicitBlockSize;
6325 const uint32_t m_Algorithm;
// Protects m_Blocks and related mutable state below.
6326 VMA_RW_MUTEX m_Mutex;
6330 bool m_HasEmptyBlock;
6332 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
6333 uint32_t m_NextBlockId;
6335 VkDeviceSize CalcMaxBlockSize()
const;
6338 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps blocks approximately sorted so fuller blocks are tried first.
6342 void IncrementallySortBlocks();
6344 VkResult AllocatePage(
6345 uint32_t currentFrameIndex,
6347 VkDeviceSize alignment,
6349 VmaSuballocationType suballocType,
6353 VkResult AllocateFromBlock(
6354 VmaDeviceMemoryBlock* pBlock,
6355 uint32_t currentFrameIndex,
6357 VkDeviceSize alignment,
6360 VmaSuballocationType suballocType,
6364 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// CPU path copies via mapped memory; GPU path records vkCmdCopyBuffer-style
// work into commandBuffer.
6367 void ApplyDefragmentationMovesCpu(
6368 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6369 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6371 void ApplyDefragmentationMovesGpu(
6372 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6373 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6374 VkCommandBuffer commandBuffer);
6382 void UpdateHasEmptyBlock();
// Interior of VmaPool_T (class header not visible in this fragment): a custom
// pool owns a VmaBlockVector, a numeric id and an optional debug name.
6387 VMA_CLASS_NO_COPY(VmaPool_T)
6389 VmaBlockVector m_BlockVector;
6394 VkDeviceSize preferredBlockSize);
// Id is assigned exactly once after construction (asserted zero before set).
6397 uint32_t GetId()
const {
return m_Id; }
6398 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6400 const char* GetName()
const {
return m_Name; }
6401 void SetName(
const char* pName);
6403 #if VMA_STATS_STRING_ENABLED
// VmaDefragmentationAlgorithm: abstract strategy interface — collect
// allocations (AddAllocation/AddAll), then Defragment() fills `moves` within
// the given byte/count budgets. AllocationInfo pairs an allocation with the
// caller's pChanged output flag.
// NOTE(review): fragmentary extraction; code kept byte-identical.
6419 class VmaDefragmentationAlgorithm
6421 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6423 VmaDefragmentationAlgorithm(
6425 VmaBlockVector* pBlockVector,
6426 uint32_t currentFrameIndex) :
6427 m_hAllocator(hAllocator),
6428 m_pBlockVector(pBlockVector),
6429 m_CurrentFrameIndex(currentFrameIndex)
6432 virtual ~VmaDefragmentationAlgorithm()
6436 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6437 virtual void AddAll() = 0;
6439 virtual VkResult Defragment(
6440 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6441 VkDeviceSize maxBytesToMove,
6442 uint32_t maxAllocationsToMove) = 0;
6444 virtual VkDeviceSize GetBytesMoved()
const = 0;
6445 virtual uint32_t GetAllocationsMoved()
const = 0;
6449 VmaBlockVector*
const m_pBlockVector;
6450 const uint32_t m_CurrentFrameIndex;
6452 struct AllocationInfo
6455 VkBool32* m_pChanged;
6458 m_hAllocation(VK_NULL_HANDLE),
6459 m_pChanged(VMA_NULL)
6463 m_hAllocation(hAlloc),
6464 m_pChanged(pChanged)
// VmaDefragmentationAlgorithm_Generic: per-block bookkeeping (BlockInfo) plus
// comparators used to pick move sources (largest/highest-offset allocations
// first) and move destinations (blocks with non-movable allocations and less
// free space preferred, to pack them tight).
// NOTE(review): fragmentary extraction; code kept byte-identical.
6470 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6472 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6474 VmaDefragmentationAlgorithm_Generic(
6476 VmaBlockVector* pBlockVector,
6477 uint32_t currentFrameIndex,
6478 bool overlappingMoveSupported);
6479 virtual ~VmaDefragmentationAlgorithm_Generic();
6481 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6482 virtual void AddAll() { m_AllAllocations =
true; }
6484 virtual VkResult Defragment(
6485 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6486 VkDeviceSize maxBytesToMove,
6487 uint32_t maxAllocationsToMove);
6489 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6490 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6493 uint32_t m_AllocationCount;
6494 bool m_AllAllocations;
6496 VkDeviceSize m_BytesMoved;
6497 uint32_t m_AllocationsMoved;
// Sort keys: bigger allocations first / higher offsets first.
6499 struct AllocationInfoSizeGreater
6501 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
6503 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6507 struct AllocationInfoOffsetGreater
6509 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const
6511 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block state gathered before defragmentation.
6517 size_t m_OriginalBlockIndex;
6518 VmaDeviceMemoryBlock* m_pBlock;
6519 bool m_HasNonMovableAllocations;
6520 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6522 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6523 m_OriginalBlockIndex(SIZE_MAX),
6525 m_HasNonMovableAllocations(true),
6526 m_Allocations(pAllocationCallbacks)
// A block has non-movable allocations when not every allocation in its
// metadata was registered for defragmentation.
6530 void CalcHasNonMovableAllocations()
6532 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6533 const size_t defragmentAllocCount = m_Allocations.size();
6534 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6537 void SortAllocationsBySizeDescending()
6539 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6542 void SortAllocationsByOffsetDescending()
6544 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo pointers by underlying block address (heterogeneous
// overload supports lookup by raw block pointer).
6548 struct BlockPointerLess
6550 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const
6552 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6554 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
6556 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Destination preference: blocks with non-movable allocations first, then
// blocks with less free space (pack fullest blocks first).
6562 struct BlockInfoCompareMoveDestination
6564 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const
6566 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6570 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6574 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6582 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6583 BlockInfoVector m_Blocks;
6585 VkResult DefragmentRound(
6586 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6587 VkDeviceSize maxBytesToMove,
6588 uint32_t maxAllocationsToMove);
6590 size_t CalcBlocksWithNonMovableCount()
const;
6592 static bool MoveMakesSense(
6593 size_t dstBlockIndex, VkDeviceSize dstOffset,
6594 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Fast defragmentation algorithm: instead of tracking individual allocations
// it compacts whole blocks, using a small cache of free regions
// (FreeSpaceDatabase) to place moved suballocations.
// NOTE(review): many brace/statement lines are elided in this excerpt; the
// comments below describe only what the visible lines establish.
6597 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6599 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6601 VmaDefragmentationAlgorithm_Fast(
6603 VmaBlockVector* pBlockVector,
6604 uint32_t currentFrameIndex,
6605 bool overlappingMoveSupported);
6606 virtual ~VmaDefragmentationAlgorithm_Fast();
// This algorithm ignores individual allocation handles; it only counts them.
6608 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6609 virtual void AddAll() { m_AllAllocations =
true; }
6611 virtual VkResult Defragment(
6612 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6613 VkDeviceSize maxBytesToMove,
6614 uint32_t maxAllocationsToMove);
6616 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6617 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6622 size_t origBlockIndex;
// Fixed-size cache (MAX_COUNT entries) of recently freed regions; an entry
// with blockInfoIndex == SIZE_MAX is considered empty/invalid.
6625 class FreeSpaceDatabase
6631 s.blockInfoIndex = SIZE_MAX;
6632 for(
size_t i = 0; i < MAX_COUNT; ++i)
6634 m_FreeSpaces[i] = s;
// Records a free region. Regions smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are rejected; otherwise an
// empty slot or a smaller existing entry is selected for replacement.
// NOTE(review): the loop-body assignments to bestIndex are elided here.
6638 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6640 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6646 size_t bestIndex = SIZE_MAX;
6647 for(
size_t i = 0; i < MAX_COUNT; ++i)
6650 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6655 if(m_FreeSpaces[i].size < size &&
6656 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6662 if(bestIndex != SIZE_MAX)
6664 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6665 m_FreeSpaces[bestIndex].offset = offset;
6666 m_FreeSpaces[bestIndex].size = size;
// Finds a cached free region that can hold `size` bytes at `alignment`.
// On success returns the block index and aligned destination offset; the
// entry is either shrunk (leftover still registrable) or invalidated.
6670 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6671 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6673 size_t bestIndex = SIZE_MAX;
6674 VkDeviceSize bestFreeSpaceAfter = 0;
6675 for(
size_t i = 0; i < MAX_COUNT; ++i)
6678 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6680 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
// Fits only if the aligned placement still ends within the region.
6682 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6684 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
// Prefer the candidate leaving the most space after the placement.
6686 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6689 bestFreeSpaceAfter = freeSpaceAfter;
6695 if(bestIndex != SIZE_MAX)
6697 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6698 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Leftover large enough to stay registered: shrink the entry in place.
6700 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6703 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6704 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6705 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
// Otherwise the entry is consumed entirely — mark it invalid.
6710 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6720 static const size_t MAX_COUNT = 4;
6724 size_t blockInfoIndex;
6725 VkDeviceSize offset;
6727 } m_FreeSpaces[MAX_COUNT];
6730 const bool m_OverlappingMoveSupported;
6732 uint32_t m_AllocationCount;
6733 bool m_AllAllocations;
6735 VkDeviceSize m_BytesMoved;
6736 uint32_t m_AllocationsMoved;
6738 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6740 void PreprocessMetadata();
6741 void PostprocessMetadata();
6742 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// Per-block state for a defragmentation pass; BLOCK_FLAG_USED marks a block
// that participates in the pass. (Most members are elided in this excerpt.)
6745 struct VmaBlockDefragmentationContext
6749 BLOCK_FLAG_USED = 0x00000001,
// Defragmentation state for a single VmaBlockVector (one memory type, or one
// custom pool). Owns the chosen algorithm instance and the list of
// allocations registered for moving.
6755 class VmaBlockVectorDefragmentationContext
6757 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6761 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6763 VmaBlockVectorDefragmentationContext(
6766 VmaBlockVector* pBlockVector,
6767 uint32_t currFrameIndex);
6768 ~VmaBlockVectorDefragmentationContext();
// VK_NULL_HANDLE m_hCustomPool means this context covers a default pool.
6770 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6771 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6772 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6774 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6775 void AddAll() { m_AllAllocations =
true; }
// Creates m_pAlgorithm and feeds it the registered allocations.
6777 void Begin(
bool overlappingMoveSupported);
6784 VmaBlockVector*
const m_pBlockVector;
6785 const uint32_t m_CurrFrameIndex;
// Owned algorithm instance; created lazily in Begin().
6787 VmaDefragmentationAlgorithm* m_pAlgorithm;
6795 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6796 bool m_AllAllocations;
// Top-level defragmentation context (backs the public VmaDefragmentationContext
// handle). Aggregates one per-block-vector context per default memory type
// plus one per participating custom pool.
6799 struct VmaDefragmentationContext_T
6802 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6804 VmaDefragmentationContext_T(
6806 uint32_t currFrameIndex,
6809 ~VmaDefragmentationContext_T();
6811 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6812 void AddAllocations(
6813 uint32_t allocationCount,
6815 VkBool32* pAllocationsChanged);
// Runs the pass with separate CPU-side and GPU-side byte/count budgets.
6823 VkResult Defragment(
6824 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6825 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6830 const uint32_t m_CurrFrameIndex;
6831 const uint32_t m_Flags;
// Indexed by memory type; entries created on demand.
6834 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6836 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// Call-recording facility, compiled in only when VMA_RECORDING_ENABLED.
// Writes a textual log of allocator API calls (one Record* method per public
// entry point) to m_File, serialized by m_FileMutex.
6839 #if VMA_RECORDING_ENABLED
// Writes the header describing the environment the recording was made in.
6846 void WriteConfiguration(
6847 const VkPhysicalDeviceProperties& devProps,
6848 const VkPhysicalDeviceMemoryProperties& memProps,
6849 uint32_t vulkanApiVersion,
6850 bool dedicatedAllocationExtensionEnabled,
6851 bool bindMemory2ExtensionEnabled,
6852 bool memoryBudgetExtensionEnabled);
// One recording method per allocator operation; each takes the frame index
// at which the call occurred.
6855 void RecordCreateAllocator(uint32_t frameIndex);
6856 void RecordDestroyAllocator(uint32_t frameIndex);
6857 void RecordCreatePool(uint32_t frameIndex,
6860 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6861 void RecordAllocateMemory(uint32_t frameIndex,
6862 const VkMemoryRequirements& vkMemReq,
6865 void RecordAllocateMemoryPages(uint32_t frameIndex,
6866 const VkMemoryRequirements& vkMemReq,
6868 uint64_t allocationCount,
6870 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6871 const VkMemoryRequirements& vkMemReq,
6872 bool requiresDedicatedAllocation,
6873 bool prefersDedicatedAllocation,
6876 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6877 const VkMemoryRequirements& vkMemReq,
6878 bool requiresDedicatedAllocation,
6879 bool prefersDedicatedAllocation,
6882 void RecordFreeMemory(uint32_t frameIndex,
6884 void RecordFreeMemoryPages(uint32_t frameIndex,
6885 uint64_t allocationCount,
6887 void RecordSetAllocationUserData(uint32_t frameIndex,
6889 const void* pUserData);
6890 void RecordCreateLostAllocation(uint32_t frameIndex,
6892 void RecordMapMemory(uint32_t frameIndex,
6894 void RecordUnmapMemory(uint32_t frameIndex,
6896 void RecordFlushAllocation(uint32_t frameIndex,
6897 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6898 void RecordInvalidateAllocation(uint32_t frameIndex,
6899 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6900 void RecordCreateBuffer(uint32_t frameIndex,
6901 const VkBufferCreateInfo& bufCreateInfo,
6904 void RecordCreateImage(uint32_t frameIndex,
6905 const VkImageCreateInfo& imageCreateInfo,
6908 void RecordDestroyBuffer(uint32_t frameIndex,
6910 void RecordDestroyImage(uint32_t frameIndex,
6912 void RecordTouchAllocation(uint32_t frameIndex,
6914 void RecordGetAllocationInfo(uint32_t frameIndex,
6916 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6918 void RecordDefragmentationBegin(uint32_t frameIndex,
6921 void RecordDefragmentationEnd(uint32_t frameIndex,
6923 void RecordSetPoolName(uint32_t frameIndex,
// Helper that normalizes a user-data pointer into a printable string.
6934 class UserDataString
6938 const char* GetString()
const {
return m_Str; }
// Serializes writes to the recording file across threads.
6948 VMA_MUTEX m_FileMutex;
// High-resolution timer base for timestamps in the log.
6950 int64_t m_StartCounter;
6952 void GetBasicParams(CallParams& outParams);
// Prints `count` items as space-separated %p pointers.
6955 template<
typename T>
6956 void PrintPointerList(uint64_t count,
const T* pItems)
6960 fprintf(m_File,
"%p", pItems[0]);
6961 for(uint64_t i = 1; i < count; ++i)
6963 fprintf(m_File,
" %p", pItems[i]);
6968 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6972 #endif // #if VMA_RECORDING_ENABLED
// Thin wrapper around a VmaPoolAllocator<VmaAllocation_T> used to allocate
// and free VmaAllocation_T objects efficiently.
6977 class VmaAllocationObjectAllocator
6979 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6981 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6988 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
// Per-heap memory budget accounting. m_BlockBytes / m_AllocationBytes are
// atomics updated on every (de)allocation; the VMA_MEMORY_BUDGET section
// additionally caches values fetched from VK_EXT_memory_budget.
6991 struct VmaCurrentBudgetData
6993 VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
6994 VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];
6996 #if VMA_MEMORY_BUDGET
// Counts operations since the last budget fetch, used to decide when a
// refresh of the cached values below is due.
6997 VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
6998 VMA_RW_MUTEX m_BudgetMutex;
6999 uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
7000 uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
7001 uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
7002 #endif // #if VMA_MEMORY_BUDGET
// Zero-initializes all per-heap counters.
7004 VmaCurrentBudgetData()
7006 for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
7008 m_BlockBytes[heapIndex] = 0;
7009 m_AllocationBytes[heapIndex] = 0;
7010 #if VMA_MEMORY_BUDGET
7011 m_VulkanUsage[heapIndex] = 0;
7012 m_VulkanBudget[heapIndex] = 0;
7013 m_BlockBytesAtBudgetFetch[heapIndex] = 0;
7017 #if VMA_MEMORY_BUDGET
7018 m_OperationsSinceBudgetFetch = 0;
// Accounts a new allocation of `allocationSize` bytes on heap `heapIndex`.
7022 void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
7024 m_AllocationBytes[heapIndex] += allocationSize;
7025 #if VMA_MEMORY_BUDGET
7026 ++m_OperationsSinceBudgetFetch;
// Reverses AddAllocation; asserts the counter cannot go negative.
7030 void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
7032 VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
7033 m_AllocationBytes[heapIndex] -= allocationSize;
7034 #if VMA_MEMORY_BUDGET
7035 ++m_OperationsSinceBudgetFetch;
// Main allocator object backing the public VmaAllocator handle. Owns one
// VmaBlockVector per memory type, dedicated-allocation lists, custom pools,
// budget accounting, and all dispatch into the Vulkan API.
// NOTE(review): many member lines are elided in this excerpt.
7041 struct VmaAllocator_T
7043 VMA_CLASS_NO_COPY(VmaAllocator_T)
// Feature/extension toggles resolved at allocator creation.
7046 uint32_t m_VulkanApiVersion;
7047 bool m_UseKhrDedicatedAllocation;
7048 bool m_UseKhrBindMemory2;
7049 bool m_UseExtMemoryBudget;
7051 VkInstance m_hInstance;
7052 bool m_AllocationCallbacksSpecified;
7053 VkAllocationCallbacks m_AllocationCallbacks;
7055 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
// Bitmask of heaps that have a user-imposed size limit.
7058 uint32_t m_HeapSizeLimitMask;
7060 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
7061 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default pools: one block vector per memory type.
7064 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (non-pooled) allocations, per memory type, each guarded by its
// own read-write mutex.
7067 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
7068 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
7069 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
7071 VmaCurrentBudgetData m_Budget;
// Returns user callbacks if they were provided, else null (use defaults).
7077 const VkAllocationCallbacks* GetAllocationCallbacks()
const
7079 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
7083 return m_VulkanFunctions;
// Effective granularity: device limit clamped up to the debug minimum.
7086 VkDeviceSize GetBufferImageGranularity()
const
7089 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
7090 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
7093 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
7094 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
7096 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const
7098 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
7099 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// True for HOST_VISIBLE memory that is not HOST_COHERENT — such memory
// needs explicit flush/invalidate.
7102 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const
7104 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
7105 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Minimum alignment for suballocations of this memory type: for
// non-coherent memory at least nonCoherentAtomSize.
7108 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const
7110 return IsMemoryTypeNonCoherent(memTypeIndex) ?
7111 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
7112 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
7115 bool IsIntegratedGpu()
const
7117 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
7120 #if VMA_RECORDING_ENABLED
7121 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Query memory requirements, reporting whether a dedicated allocation is
// required/preferred (via VK_KHR_dedicated_allocation when available).
7124 void GetBufferMemoryRequirements(
7126 VkMemoryRequirements& memReq,
7127 bool& requiresDedicatedAllocation,
7128 bool& prefersDedicatedAllocation)
const;
7129 void GetImageMemoryRequirements(
7131 VkMemoryRequirements& memReq,
7132 bool& requiresDedicatedAllocation,
7133 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point; can produce multiple allocations at once.
7136 VkResult AllocateMemory(
7137 const VkMemoryRequirements& vkMemReq,
7138 bool requiresDedicatedAllocation,
7139 bool prefersDedicatedAllocation,
7140 VkBuffer dedicatedBuffer,
7141 VkImage dedicatedImage,
7143 VmaSuballocationType suballocType,
7144 size_t allocationCount,
7149 size_t allocationCount,
7152 VkResult ResizeAllocation(
7154 VkDeviceSize newSize);
7156 void CalculateStats(
VmaStats* pStats);
7159 VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);
7161 #if VMA_STATS_STRING_ENABLED
7162 void PrintDetailedMap(
class VmaJsonWriter& json);
7165 VkResult DefragmentationBegin(
7169 VkResult DefragmentationEnd(
7176 void DestroyPool(
VmaPool pool);
7179 void SetCurrentFrameIndex(uint32_t frameIndex);
7180 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
7182 void MakePoolAllocationsLost(
7184 size_t* pLostAllocationCount);
7185 VkResult CheckPoolCorruption(
VmaPool hPool);
7186 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Low-level vkAllocateMemory / vkFreeMemory wrappers (budget-aware).
7191 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
7193 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
// vkBindBufferMemory(2) / vkBindImageMemory(2) wrappers.
7195 VkResult BindVulkanBuffer(
7196 VkDeviceMemory memory,
7197 VkDeviceSize memoryOffset,
7201 VkResult BindVulkanImage(
7202 VkDeviceMemory memory,
7203 VkDeviceSize memoryOffset,
7210 VkResult BindBufferMemory(
7212 VkDeviceSize allocationLocalOffset,
7215 VkResult BindImageMemory(
7217 VkDeviceSize allocationLocalOffset,
7221 void FlushOrInvalidateAllocation(
7223 VkDeviceSize offset, VkDeviceSize size,
7224 VMA_CACHE_OPERATION op);
// Fills allocation memory with a debug pattern byte.
7226 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
7232 uint32_t GetGpuDefragmentationMemoryTypeBits();
7235 VkDeviceSize m_PreferredLargeHeapBlockSize;
7237 VkPhysicalDevice m_PhysicalDevice;
7238 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Lazily computed; see CalculateGpuDefragmentationMemoryTypeBits().
7239 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
7241 VMA_RW_MUTEX m_PoolsMutex;
7243 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
7244 uint32_t m_NextPoolId;
7248 #if VMA_RECORDING_ENABLED
7249 VmaRecorder* m_pRecorder;
7254 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation from a specific memory type; falls back between block and
// dedicated strategies.
7256 VkResult AllocateMemoryOfType(
7258 VkDeviceSize alignment,
7259 bool dedicatedAllocation,
7260 VkBuffer dedicatedBuffer,
7261 VkImage dedicatedImage,
7263 uint32_t memTypeIndex,
7264 VmaSuballocationType suballocType,
7265 size_t allocationCount,
7269 VkResult AllocateDedicatedMemoryPage(
7271 VmaSuballocationType suballocType,
7272 uint32_t memTypeIndex,
7273 const VkMemoryAllocateInfo& allocInfo,
7275 bool isUserDataString,
7280 VkResult AllocateDedicatedMemory(
7282 VmaSuballocationType suballocType,
7283 uint32_t memTypeIndex,
7286 bool isUserDataString,
7288 VkBuffer dedicatedBuffer,
7289 VkImage dedicatedImage,
7290 size_t allocationCount,
7299 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
7301 #if VMA_MEMORY_BUDGET
7302 void UpdateVulkanBudget();
7303 #endif // #if VMA_MEMORY_BUDGET
// Allocates `size` bytes aligned to `alignment` through the allocator's
// (possibly user-supplied) VkAllocationCallbacks.
7309 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
7311 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
// Frees memory previously obtained from VmaMalloc(hAllocator, ...).
7314 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
7316 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Allocates raw storage for a single T with T's natural alignment.
// NOTE(review): the function signature line is elided in this excerpt; only
// the allocation expression is visible.
7319 template<
typename T>
7322 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Allocates raw storage for `count` objects of type T.
// NOTE(review): no overflow check on sizeof(T) * count is visible here.
7325 template<
typename T>
7326 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
7328 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys *ptr and releases its storage via the allocator callbacks.
// NOTE(review): the explicit destructor call is elided in this excerpt.
7331 template<
typename T>
7332 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
7337 VmaFree(hAllocator, ptr);
// Destroys `count` objects (in reverse order, per the downward loop) and
// releases the array storage.
// NOTE(review): the per-element destructor call inside the loop is elided in
// this excerpt.
7341 template<
typename T>
7342 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
7346 for(
size_t i = count; i--; )
7348 VmaFree(hAllocator, ptr);
#if VMA_STATS_STRING_ENABLED
// Minimal append-only string builder backed by a VmaVector<char> that uses
// the allocator's callbacks. The buffer is NOT null-terminated; callers use
// GetLength()/GetData().
7357 class VmaStringBuilder
7360 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
7361 size_t GetLength()
const {
return m_Data.size(); }
7362 const char* GetData()
const {
return m_Data.data(); }
7364 void Add(
char ch) { m_Data.push_back(ch); }
7365 void Add(
const char* pStr);
7366 void AddNewLine() { Add(
'\n'); }
7367 void AddNumber(uint32_t num);
7368 void AddNumber(uint64_t num);
7369 void AddPointer(
const void* ptr);
7372 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends the characters of a C string (without its terminating null) by
// growing the buffer once and memcpy-ing the payload.
7375 void VmaStringBuilder::Add(
const char* pStr)
7377 const size_t strLen = strlen(pStr);
7380 const size_t oldCount = m_Data.size();
7381 m_Data.resize(oldCount + strLen);
7382 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Formats an unsigned integer in decimal by emitting digits least-significant
// first into a local buffer (written backwards via *--p).
// NOTE(review): buffer setup and the final Add of the assembled digits are
// elided in this excerpt.
7386 void VmaStringBuilder::AddNumber(uint32_t num)
7393 *--p =
'0' + (num % 10);
// 64-bit overload of the same backwards decimal formatting.
7400 void VmaStringBuilder::AddNumber(uint64_t num)
7407 *--p =
'0' + (num % 10);
// Formats a pointer value into a stack buffer via VmaPtrToStr and appends it.
7414 void VmaStringBuilder::AddPointer(
const void* ptr)
7417 VmaPtrToStr(buf,
sizeof(buf), ptr);
7421 #endif // #if VMA_STATS_STRING_ENABLED
7426 #if VMA_STATS_STRING_ENABLED
// Streaming JSON writer used for the detailed statistics dump. Maintains a
// stack of open objects/arrays (StackItem) to drive commas, indentation and
// key/value alternation; output goes to the wrapped VmaStringBuilder.
7430 VMA_CLASS_NO_COPY(VmaJsonWriter)
7432 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// singleLine=true suppresses newlines/indentation inside the collection.
7435 void BeginObject(
bool singleLine =
false);
7438 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
7441 void WriteString(
const char* pStr);
// Begin/Continue/End allow building one JSON string from several pieces.
7442 void BeginString(
const char* pStr = VMA_NULL);
7443 void ContinueString(
const char* pStr);
7444 void ContinueString(uint32_t n);
7445 void ContinueString(uint64_t n);
7446 void ContinueString_Pointer(
const void* ptr);
7447 void EndString(
const char* pStr = VMA_NULL);
7449 void WriteNumber(uint32_t n);
7450 void WriteNumber(uint64_t n);
7451 void WriteBool(
bool b);
7455 static const char*
const INDENT;
7457 enum COLLECTION_TYPE
7459 COLLECTION_TYPE_OBJECT,
7460 COLLECTION_TYPE_ARRAY,
// One stack entry per currently-open collection. valueCount drives comma
// placement and, for objects, key/value alternation (even = key expected).
7464 COLLECTION_TYPE type;
7465 uint32_t valueCount;
7466 bool singleLineMode;
7469 VmaStringBuilder& m_SB;
7470 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7471 bool m_InsideString;
7473 void BeginValue(
bool isString);
7474 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
7477 const char*
const VmaJsonWriter::INDENT =
"  ";
// --- VmaJsonWriter member definitions ---
// NOTE(review): brace lines and several statements (push of StackItem,
// m_SB.Add of delimiters, pop_back) are elided throughout this excerpt.
7479 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7481 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7482 m_InsideString(false)
// Destructor asserts the document was fully closed: no unterminated string,
// no open object/array.
7486 VmaJsonWriter::~VmaJsonWriter()
7488 VMA_ASSERT(!m_InsideString);
7489 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object and pushes its StackItem.
7492 void VmaJsonWriter::BeginObject(
bool singleLine)
7494 VMA_ASSERT(!m_InsideString);
7500 item.type = COLLECTION_TYPE_OBJECT;
7501 item.valueCount = 0;
7502 item.singleLineMode = singleLine;
7503 m_Stack.push_back(item);
// Closes the innermost collection, which must be an object.
7506 void VmaJsonWriter::EndObject()
7508 VMA_ASSERT(!m_InsideString);
7513 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens a JSON array and pushes its StackItem.
7517 void VmaJsonWriter::BeginArray(
bool singleLine)
7519 VMA_ASSERT(!m_InsideString);
7525 item.type = COLLECTION_TYPE_ARRAY;
7526 item.valueCount = 0;
7527 item.singleLineMode = singleLine;
7528 m_Stack.push_back(item);
// Closes the innermost collection, which must be an array.
7531 void VmaJsonWriter::EndArray()
7533 VMA_ASSERT(!m_InsideString);
7538 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: write a complete quoted string value.
7542 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a quoted string; optional initial content.
7548 void VmaJsonWriter::BeginString(
const char* pStr)
7550 VMA_ASSERT(!m_InsideString);
7554 m_InsideString =
true;
7555 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7557 ContinueString(pStr);
// Appends characters to the open string, escaping as needed; characters it
// cannot represent trigger the assert below.
7561 void VmaJsonWriter::ContinueString(
const char* pStr)
7563 VMA_ASSERT(m_InsideString);
7565 const size_t strLen = strlen(pStr);
7566 for(
size_t i = 0; i < strLen; ++i)
7599 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric append into an open string.
7605 void VmaJsonWriter::ContinueString(uint32_t n)
7607 VMA_ASSERT(m_InsideString);
7611 void VmaJsonWriter::ContinueString(uint64_t n)
7613 VMA_ASSERT(m_InsideString);
// Pointer append into an open string.
7617 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7619 VMA_ASSERT(m_InsideString);
7620 m_SB.AddPointer(ptr);
// Appends optional final content and closes the quoted string.
7623 void VmaJsonWriter::EndString(
const char* pStr)
7625 VMA_ASSERT(m_InsideString);
7626 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7628 ContinueString(pStr);
7631 m_InsideString =
false;
// Unquoted numeric values.
7634 void VmaJsonWriter::WriteNumber(uint32_t n)
7636 VMA_ASSERT(!m_InsideString);
7641 void VmaJsonWriter::WriteNumber(uint64_t n)
7643 VMA_ASSERT(!m_InsideString);
// Writes literal true/false.
7648 void VmaJsonWriter::WriteBool(
bool b)
7650 VMA_ASSERT(!m_InsideString);
7652 m_SB.Add(b ?
"true" :
"false");
7655 void VmaJsonWriter::WriteNull()
7657 VMA_ASSERT(!m_InsideString);
// Emits the separator required before a new value: inside an object an even
// valueCount means a key (must be a string) comes next; odd means the value
// for the previous key.
7662 void VmaJsonWriter::BeginValue(
bool isString)
7664 if(!m_Stack.empty())
7666 StackItem& currItem = m_Stack.back();
7667 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7668 currItem.valueCount % 2 == 0)
7670 VMA_ASSERT(isString);
7673 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7674 currItem.valueCount % 2 != 0)
7678 else if(currItem.valueCount > 0)
7687 ++currItem.valueCount;
// Writes newline + one INDENT per open (non-single-line) collection;
// oneLess is used when closing a collection.
7691 void VmaJsonWriter::WriteIndent(
bool oneLess)
7693 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7697 size_t count = m_Stack.size();
7698 if(count > 0 && oneLess)
7702 for(
size_t i = 0; i < count; ++i)
7709 #endif // #if VMA_STATS_STRING_ENABLED
// Sets the allocation's user data. In string mode (IsUserDataString()) the
// old string is freed and the new one deep-copied via VmaCreateStringCopy;
// otherwise (per line 7728) the raw pointer is stored as-is.
7713 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7715 if(IsUserDataString())
// Passing the currently-stored string back in would be a use-after-free.
7717 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7719 FreeUserDataString(hAllocator);
7721 if(pUserData != VMA_NULL)
7723 m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (
const char*)pUserData);
7728 m_pUserData = pUserData;
// Re-points a block-type allocation at a (possibly different) memory block
// and offset, used during defragmentation. If this allocation is
// persistently mapped and the block changes, the mapping reference counts
// are migrated: unmap the old block, map the new one.
7732 void VmaAllocation_T::ChangeBlockAllocation(
7734 VmaDeviceMemoryBlock* block,
7735 VkDeviceSize offset)
7737 VMA_ASSERT(block != VMA_NULL);
7738 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7741 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag to get the plain map reference count.
7743 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7744 if(IsPersistentMap())
7746 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7747 block->Map(hAllocator, mapRefCount, VMA_NULL);
7750 m_BlockAllocation.m_Block = block;
7751 m_BlockAllocation.m_Offset = offset;
// Updates only the offset within the current block (same block assumed).
7754 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7756 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7757 m_BlockAllocation.m_Offset = newOffset;
// --- VmaAllocation_T accessors dispatching on allocation type ---
// Offset within the VkDeviceMemory: block allocations use their stored
// offset; dedicated allocations occupy the whole VkDeviceMemory (the
// dedicated-case return value is elided in this excerpt).
7760 VkDeviceSize VmaAllocation_T::GetOffset()
const
7764 case ALLOCATION_TYPE_BLOCK:
7765 return m_BlockAllocation.m_Offset;
7766 case ALLOCATION_TYPE_DEDICATED:
// Underlying VkDeviceMemory handle for either allocation type.
7774 VkDeviceMemory VmaAllocation_T::GetMemory()
const
7778 case ALLOCATION_TYPE_BLOCK:
7779 return m_BlockAllocation.m_Block->GetDeviceMemory();
7780 case ALLOCATION_TYPE_DEDICATED:
7781 return m_DedicatedAllocation.m_hMemory;
7784 return VK_NULL_HANDLE;
// CPU pointer if mapped: block allocations return the block's mapped base
// plus this allocation's offset; dedicated allocations return their own
// mapped pointer (asserted consistent with m_MapCount).
7788 void* VmaAllocation_T::GetMappedData()
const
7792 case ALLOCATION_TYPE_BLOCK:
7795 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7796 VMA_ASSERT(pBlockData != VMA_NULL);
7797 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7804 case ALLOCATION_TYPE_DEDICATED:
7805 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7806 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can be "lost"; the flag is stored per allocation.
7813 bool VmaAllocation_T::CanBecomeLost()
const
7817 case ALLOCATION_TYPE_BLOCK:
7818 return m_BlockAllocation.m_CanBecomeLost;
7819 case ALLOCATION_TYPE_DEDICATED:
// Attempts to mark this allocation lost: succeeds only if it is not already
// lost and has not been used within the last frameInUseCount frames; uses a
// compare-exchange on the last-use frame index to stay thread-safe.
7827 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7829 VMA_ASSERT(CanBecomeLost());
7835 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7838 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still considered in use by recent frames — cannot be lost yet.
7843 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7849 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7859 #if VMA_STATS_STRING_ENABLED
// Human-readable names indexed by VmaSuballocationType (entries elided in
// this excerpt).
7862 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emits this allocation's properties as key/value pairs into an already-open
// JSON object: Type, Size, optional UserData (string or pointer),
// CreationFrameIndex, LastUseFrameIndex, and Usage when nonzero.
7871 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const
7873 json.WriteString(
"Type");
7874 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7876 json.WriteString(
"Size");
7877 json.WriteNumber(m_Size);
7879 if(m_pUserData != VMA_NULL)
7881 json.WriteString(
"UserData");
7882 if(IsUserDataString())
7884 json.WriteString((
const char*)m_pUserData);
// Non-string user data is printed as a raw pointer value.
7889 json.ContinueString_Pointer(m_pUserData);
7894 json.WriteString(
"CreationFrameIndex");
7895 json.WriteNumber(m_CreationFrameIndex);
7897 json.WriteString(
"LastUseFrameIndex");
7898 json.WriteNumber(GetLastUseFrameIndex());
7900 if(m_BufferImageUsage != 0)
7902 json.WriteString(
"Usage");
7903 json.WriteNumber(m_BufferImageUsage);
// Frees the deep-copied user-data string and clears the pointer. Only valid
// in string mode.
7909 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7911 VMA_ASSERT(IsUserDataString());
7912 VmaFreeString(hAllocator->GetAllocationCallbacks(), (
char*)m_pUserData);
7913 m_pUserData = VMA_NULL;
// Increments the map reference count of a block allocation. The count lives
// in the low 7 bits of m_MapCount (high bit = persistent-map flag), hence
// the 0x7F saturation check.
7916 void VmaAllocation_T::BlockAllocMap()
7918 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7920 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7926 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count; asserts on unmap without a prior map.
7930 void VmaAllocation_T::BlockAllocUnmap()
7932 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7934 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7940 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, returns the cached pointer
// and bumps the reference count (saturating at 0x7F in the low bits of
// m_MapCount); otherwise performs the initial vkMapMemory and caches the
// result in m_pMappedData.
7944 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7946 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7950 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7952 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7953 *ppData = m_DedicatedAllocation.m_pMappedData;
7959 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7960 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: call through the allocator's dispatched vkMapMemory.
7965 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7966 hAllocator->m_hDevice,
7967 m_DedicatedAllocation.m_hMemory,
7972 if(result == VK_SUCCESS)
7974 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation when its map count reaches zero; calls
// vkUnmapMemory and clears the cached pointer. Asserts on unbalanced unmap.
7981 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7983 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7985 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7990 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7991 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7992 hAllocator->m_hDevice,
7993 m_DedicatedAllocation.m_hMemory);
7998 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
8002 #if VMA_STATS_STRING_ENABLED
// Serializes one VmaStatInfo as a JSON object: scalar counters plus nested
// single-line Min/Avg/Max objects for allocation and unused-range sizes.
// (The WriteNumber calls for each value are elided in this excerpt.)
8004 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
8008 json.WriteString(
"Blocks");
8011 json.WriteString(
"Allocations");
8014 json.WriteString(
"UnusedRanges");
8017 json.WriteString(
"UsedBytes");
8020 json.WriteString(
"UnusedBytes");
8025 json.WriteString(
"AllocationSize");
8026 json.BeginObject(
true);
8027 json.WriteString(
"Min");
8029 json.WriteString(
"Avg");
8031 json.WriteString(
"Max");
8038 json.WriteString(
"UnusedRangeSize");
8039 json.BeginObject(
true);
8040 json.WriteString(
"Min");
8042 json.WriteString(
"Avg");
8044 json.WriteString(
"Max");
8052 #endif // #if VMA_STATS_STRING_ENABLED
// Comparator ordering suballocation-list iterators by suballocation size;
// the size_t overload enables binary search for "first free suballocation
// of at least a given size".
8054 struct VmaSuballocationItemSizeLess
8057 const VmaSuballocationList::iterator lhs,
8058 const VmaSuballocationList::iterator rhs)
const
8060 return lhs->size < rhs->size;
8063 const VmaSuballocationList::iterator lhs,
8064 VkDeviceSize rhsSize)
const
8066 return lhs->size < rhsSize;
// Base-class constructor: captures the allocator's allocation callbacks for
// use by derived metadata containers.
8074 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
8076 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
8080 #if VMA_STATS_STRING_ENABLED
// Shared JSON-dump helpers used by all metadata implementations.
// Begin: writes the block summary (TotalBytes/UnusedBytes/Allocations/
// UnusedRanges) and opens the "Suballocations" array.
8082 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
8083 VkDeviceSize unusedBytes,
8084 size_t allocationCount,
8085 size_t unusedRangeCount)
const
8089 json.WriteString(
"TotalBytes");
8090 json.WriteNumber(GetSize());
8092 json.WriteString(
"UnusedBytes");
8093 json.WriteNumber(unusedBytes);
8095 json.WriteString(
"Allocations");
8096 json.WriteNumber((uint64_t)allocationCount);
8098 json.WriteString(
"UnusedRanges");
8099 json.WriteNumber((uint64_t)unusedRangeCount);
8101 json.WriteString(
"Suballocations");
// Emits one single-line object for a live allocation: its Offset followed by
// the allocation's own parameters (PrintParameters).
8105 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
8106 VkDeviceSize offset,
8109 json.BeginObject(
true);
8111 json.WriteString(
"Offset");
8112 json.WriteNumber(offset);
8114 hAllocation->PrintParameters(json);
// Emits one single-line object for a free range: Offset, Type = FREE, Size.
8119 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
8120 VkDeviceSize offset,
8121 VkDeviceSize size)
const
8123 json.BeginObject(
true);
8125 json.WriteString(
"Offset");
8126 json.WriteNumber(offset);
8128 json.WriteString(
"Type");
8129 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
8131 json.WriteString(
"Size");
8132 json.WriteNumber(size);
// End: closes the structures opened by PrintDetailedMap_Begin.
8137 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const
8143 #endif // #if VMA_STATS_STRING_ENABLED
// Generic (list-based) metadata: a linked list of suballocations covering
// the whole block, plus m_FreeSuballocationsBySize — a vector of iterators
// to free suballocations sorted by size for best-fit lookup.
8148 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
8149 VmaBlockMetadata(hAllocator),
8152 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8153 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
8157 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes the metadata for a freshly created block: one single FREE
// suballocation spanning the entire block, registered in the by-size list.
8161 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
8163 VmaBlockMetadata::Init(size);
8166 m_SumFreeSize = size;
8168 VmaSuballocation suballoc = {};
8169 suballoc.offset = 0;
8170 suballoc.size = size;
8171 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8172 suballoc.hAllocation = VK_NULL_HANDLE;
8174 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8175 m_Suballocations.push_back(suballoc);
8176 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
8178 m_FreeSuballocationsBySize.push_back(suballocItem);
// Debug validation of all metadata invariants. Walks the suballocation list
// recomputing offsets, free count, and free-size sum, then cross-checks the
// by-size registry. Returns false (via VMA_VALIDATE) on the first violation.
8181 bool VmaBlockMetadata_Generic::Validate()
const
8183 VMA_VALIDATE(!m_Suballocations.empty());
// Expected offset of the suballocation currently being visited.
8186 VkDeviceSize calculatedOffset = 0;
8188 uint32_t calculatedFreeCount = 0;
8190 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free suballocations big enough to appear in
// m_FreeSuballocationsBySize.
8193 size_t freeSuballocationsToRegister = 0;
8195 bool prevFree =
false;
8197 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
8198 suballocItem != m_Suballocations.cend();
8201 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block contiguously.
8204 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
8206 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free suballocations should have been merged.
8208 VMA_VALIDATE(!prevFree || !currFree);
// Free <=> no allocation handle attached.
8210 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
8214 calculatedSumFreeSize += subAlloc.size;
8215 ++calculatedFreeCount;
8216 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8218 ++freeSuballocationsToRegister;
// Every free range must at least cover the debug margin.
8222 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// A used suballocation must agree with its allocation object.
8226 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
8227 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin enabled, every allocation is preceded by free space.
8230 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
8233 calculatedOffset += subAlloc.size;
8234 prevFree = currFree;
// The by-size registry must hold exactly the registrable free ranges,
// sorted by non-decreasing size.
8239 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
8241 VkDeviceSize lastSize = 0;
8242 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
8244 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
8247 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8249 VMA_VALIDATE(suballocItem->size >= lastSize);
8251 lastSize = suballocItem->size;
// Final consistency of aggregate counters against cached members.
8255 VMA_VALIDATE(ValidateFreeSuballocationList());
8256 VMA_VALIDATE(calculatedOffset == GetSize());
8257 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
8258 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: the by-size vector is sorted ascending, so the last
// registered entry is the maximum (0 when nothing is registered).
8263 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const
8265 if(!m_FreeSuballocationsBySize.empty())
8267 return m_FreeSuballocationsBySize.back()->size;
// Empty block = exactly one suballocation and it is free.
8275 bool VmaBlockMetadata_Generic::IsEmpty()
const
8277 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo for this block by iterating all suballocations and
// classifying them as used vs. free. (The per-branch accumulation statements
// are elided in this excerpt.)
8280 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
8284 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
8296 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
8297 suballocItem != m_Suballocations.cend();
8300 const VmaSuballocation& suballoc = *suballocItem;
8301 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's contribution into pool-level statistics.
8314 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const
8316 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
8318 inoutStats.
size += GetSize();
8325 #if VMA_STATS_STRING_ENABLED
// JSON dump of this block: summary header, then one entry per suballocation
// (free ranges and live allocations), via the shared PrintDetailedMap_*
// helpers of the base class.
8327 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const
8329 PrintDetailedMap_Begin(json,
8331 m_Suballocations.size() - (size_t)m_FreeCount,
8335 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
8336 suballocItem != m_Suballocations.cend();
8337 ++suballocItem, ++i)
8339 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8341 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
8345 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
8349 PrintDetailedMap_End(json);
8352 #endif // #if VMA_STATS_STRING_ENABLED
// Searches this block for a place to put a new allocation of allocSize bytes
// with the given alignment/type, writing the result into *pAllocationRequest.
// First tries purely free suballocations (best-fit via the size-sorted vector,
// or min-offset linear scan depending on strategy); if canMakeOtherLost is set,
// additionally considers candidates that would require making existing lost-able
// allocations lost, choosing the one with the lowest cost (CalcCost).
// NOTE(review): this extract is heavily elided — early returns, several
// CheckAllocation arguments, loop increments and closing braces (gaps in the
// original numbering, e.g. ~8377-8381, ~8395-8402, ~8460-8465) are not visible.
8354 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
8355 uint32_t currentFrameIndex,
8356 uint32_t frameInUseCount,
8357 VkDeviceSize bufferImageGranularity,
8358 VkDeviceSize allocSize,
8359 VkDeviceSize allocAlignment,
8361 VmaSuballocationType allocType,
8362 bool canMakeOtherLost,
8364 VmaAllocationRequest* pAllocationRequest)
8366 VMA_ASSERT(allocSize > 0);
8367 VMA_ASSERT(!upperAddress);
8368 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8369 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8370 VMA_HEAVY_ASSERT(Validate());
8372 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Quick reject: without making others lost, total free space (plus debug
// margins on both sides) must be able to hold the request.
8375 if(canMakeOtherLost ==
false &&
8376 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
8382 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
8383 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted vector for the first free
// suballocation large enough, then scan forward until one passes
// CheckAllocation (alignment / granularity constraints).
8388 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8389 m_FreeSuballocationsBySize.data(),
8390 m_FreeSuballocationsBySize.data() + freeSuballocCount,
8391 allocSize + 2 * VMA_DEBUG_MARGIN,
8392 VmaSuballocationItemSizeLess());
8393 size_t index = it - m_FreeSuballocationsBySize.data();
8394 for(; index < freeSuballocCount; ++index)
8399 bufferImageGranularity,
8403 m_FreeSuballocationsBySize[index],
8405 &pAllocationRequest->offset,
8406 &pAllocationRequest->itemsToMakeLostCount,
8407 &pAllocationRequest->sumFreeSize,
8408 &pAllocationRequest->sumItemSize))
8410 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: walk suballocations in address order and take the
// first free one that fits.
8415 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
8417 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8418 it != m_Suballocations.end();
8421 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8424 bufferImageGranularity,
8430 &pAllocationRequest->offset,
8431 &pAllocationRequest->itemsToMakeLostCount,
8432 &pAllocationRequest->sumFreeSize,
8433 &pAllocationRequest->sumItemSize))
8435 pAllocationRequest->item = it;
// Fallback strategy: scan free suballocations from largest to smallest.
8443 for(
size_t index = freeSuballocCount; index--; )
8448 bufferImageGranularity,
8452 m_FreeSuballocationsBySize[index],
8454 &pAllocationRequest->offset,
8455 &pAllocationRequest->itemsToMakeLostCount,
8456 &pAllocationRequest->sumFreeSize,
8457 &pAllocationRequest->sumItemSize))
8459 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lost-allocation path: consider every suballocation that is free or whose
// allocation can become lost, keeping the cheapest viable request.
8466 if(canMakeOtherLost)
8471 VmaAllocationRequest tmpAllocRequest = {};
8472 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8473 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8474 suballocIt != m_Suballocations.end();
8477 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8478 suballocIt->hAllocation->CanBecomeLost())
8483 bufferImageGranularity,
8489 &tmpAllocRequest.offset,
8490 &tmpAllocRequest.itemsToMakeLostCount,
8491 &tmpAllocRequest.sumFreeSize,
8492 &tmpAllocRequest.sumItemSize))
8496 *pAllocationRequest = tmpAllocRequest;
8497 pAllocationRequest->item = suballocIt;
// Keep this candidate if it is the first found or cheaper than the best so far.
8500 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8502 *pAllocationRequest = tmpAllocRequest;
8503 pAllocationRequest->item = suballocIt;
// Executes the "make lost" part of an allocation request produced by
// CreateAllocationRequest: walks forward from pAllocationRequest->item and
// makes allocations lost until itemsToMakeLostCount reaches zero, converting
// each to a free suballocation. On exit the request's item must point at a
// free suballocation. NOTE(review): the failure return path (original lines
// ~8536-8542) is elided from this extract.
8516 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8517 uint32_t currentFrameIndex,
8518 uint32_t frameInUseCount,
8519 VmaAllocationRequest* pAllocationRequest)
8521 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8523 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over free suballocations; only occupied ones can be made lost.
8525 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8527 ++pAllocationRequest->item;
8529 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8530 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8531 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8532 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Success: the suballocation becomes free; FreeSuballocation may merge it
// with neighbors and returns the merged iterator.
8534 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8535 --pAllocationRequest->itemsToMakeLostCount;
8543 VMA_HEAVY_ASSERT(Validate());
8544 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8545 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Walks all suballocations and makes lost every allocation that can become
// lost for the given frame; returns how many allocations were freed this way.
8550 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8552 uint32_t lostAllocationCount = 0;
8553 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8554 it != m_Suballocations.end();
8557 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8558 it->hAllocation->CanBecomeLost() &&
8559 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// MakeLost succeeded: convert the suballocation into a free range.
// FreeSuballocation may merge with neighbors; it returns a valid iterator.
8561 it = FreeSuballocation(it);
8562 ++lostAllocationCount;
8565 return lostAllocationCount;
// Scans every live allocation in the block and verifies the magic values
// written in the debug margins just before and just after it are intact.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corruption found.
// NOTE(review): the final success return (original lines ~8585-8589) is
// elided from this extract.
8568 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8570 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8571 it != m_Suballocations.end();
8574 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Check the margin preceding the allocation.
8576 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8578 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8579 return VK_ERROR_VALIDATION_FAILED_EXT;
// Check the margin following the allocation.
8581 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8583 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8584 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously validated allocation request: shrinks the chosen free
// suballocation to exactly [request.offset, request.offset + allocSize),
// and inserts new free suballocations for any leftover padding before/after.
// Updates m_FreeCount and m_SumFreeSize accordingly.
// NOTE(review): this extract is elided — the hAllocation parameter
// declaration, iterator advance (`++next`), padding conditionals, and the
// free-count adjustments around original lines ~8646-8653 are not visible.
8592 void VmaBlockMetadata_Generic::Alloc(
8593 const VmaAllocationRequest& request,
8594 VmaSuballocationType type,
8595 VkDeviceSize allocSize,
8598 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8599 VMA_ASSERT(request.item != m_Suballocations.end());
8600 VmaSuballocation& suballoc = *request.item;
8602 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8604 VMA_ASSERT(request.offset >= suballoc.offset);
// Space left in the free range before the chosen offset (alignment/margin).
8605 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8606 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// Space left in the free range after the allocation's end.
8607 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The free item changes size/identity, so remove it from the size-sorted
// registry before mutating it.
8611 UnregisterFreeSuballocation(request.item);
8613 suballoc.offset = request.offset;
8614 suballoc.size = allocSize;
8615 suballoc.type = type;
8616 suballoc.hAllocation = hAllocation;
// Insert a free suballocation covering the trailing padding, if any.
8621 VmaSuballocation paddingSuballoc = {};
8622 paddingSuballoc.offset = request.offset + allocSize;
8623 paddingSuballoc.size = paddingEnd;
8624 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8625 VmaSuballocationList::iterator next = request.item;
8627 const VmaSuballocationList::iterator paddingEndItem =
8628 m_Suballocations.insert(next, paddingSuballoc);
8629 RegisterFreeSuballocation(paddingEndItem);
// Insert a free suballocation covering the leading padding, if any.
8635 VmaSuballocation paddingSuballoc = {};
8636 paddingSuballoc.offset = request.offset - paddingBegin;
8637 paddingSuballoc.size = paddingBegin;
8638 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8639 const VmaSuballocationList::iterator paddingBeginItem =
8640 m_Suballocations.insert(request.item, paddingSuballoc);
8641 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; paddings re-add to the count below.
8645 m_FreeCount = m_FreeCount - 1;
8646 if(paddingBegin > 0)
8654 m_SumFreeSize -= allocSize;
// Frees the suballocation holding the given allocation handle by linear
// search over the suballocation list; asserts if the handle is not found.
8657 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8659 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8660 suballocItem != m_Suballocations.end();
8663 VmaSuballocation& suballoc = *suballocItem;
8664 if(suballoc.hAllocation == allocation)
// Found it: convert to a free range (merging with free neighbors).
8666 FreeSuballocation(suballocItem);
8667 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation does not belong to this block.
8671 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation starting at exactly the given offset by linear
// search; asserts if no suballocation starts at that offset.
8674 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8676 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8677 suballocItem != m_Suballocations.end();
8680 VmaSuballocation& suballoc = *suballocItem;
8681 if(suballoc.offset == offset)
8683 FreeSuballocation(suballocItem);
// Reaching here means no suballocation starts at `offset`.
8687 VMA_ASSERT(0 &&
"Not found!");
8690 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const
8692 VkDeviceSize lastSize = 0;
8693 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8695 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8697 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8698 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8699 VMA_VALIDATE(it->size >= lastSize);
8700 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting inside the suballocation at suballocItem. On success writes:
// *pOffset (chosen aligned start), *itemsToMakeLostCount (allocations that
// would have to be made lost), *pSumFreeSize / *pSumItemSize (cost inputs).
// Two major paths: canMakeOtherLost == true allows consuming subsequent
// lost-able allocations; otherwise only the single free suballocation is used.
// NOTE(review): this extract is heavily elided — many early `return false`
// statements, braces and `++`/`--` iterator advances (gaps in the original
// numbering, e.g. ~8741-8748, ~8868-8885, ~8961-8978) are not visible here.
8705 bool VmaBlockMetadata_Generic::CheckAllocation(
8706 uint32_t currentFrameIndex,
8707 uint32_t frameInUseCount,
8708 VkDeviceSize bufferImageGranularity,
8709 VkDeviceSize allocSize,
8710 VkDeviceSize allocAlignment,
8711 VmaSuballocationType allocType,
8712 VmaSuballocationList::const_iterator suballocItem,
8713 bool canMakeOtherLost,
8714 VkDeviceSize* pOffset,
8715 size_t* itemsToMakeLostCount,
8716 VkDeviceSize* pSumFreeSize,
8717 VkDeviceSize* pSumItemSize)
const
8719 VMA_ASSERT(allocSize > 0);
8720 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8721 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8722 VMA_ASSERT(pOffset != VMA_NULL);
8724 *itemsToMakeLostCount = 0;
// ----- Path 1: may make other allocations lost. -----
8728 if(canMakeOtherLost)
8730 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8732 *pSumFreeSize = suballocItem->size;
// Starting suballocation is occupied: only usable if its allocation is
// lost-able and old enough relative to currentFrameIndex.
8736 if(suballocItem->hAllocation->CanBecomeLost() &&
8737 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8739 ++*itemsToMakeLostCount;
8740 *pSumItemSize = suballocItem->size;
// Remainder of the block from this offset must fit the allocation.
8749 if(GetSize() - suballocItem->offset < allocSize)
8755 *pOffset = suballocItem->offset;
// Reserve the leading debug margin, then align the start.
8758 if(VMA_DEBUG_MARGIN > 0)
8760 *pOffset += VMA_DEBUG_MARGIN;
8764 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against preceding suballocations on the
// same "page": bump alignment up if a conflicting type is adjacent.
8768 if(bufferImageGranularity > 1)
8770 bool bufferImageGranularityConflict =
false;
8771 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8772 while(prevSuballocItem != m_Suballocations.cbegin())
8775 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8776 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8778 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8780 bufferImageGranularityConflict =
true;
8788 if(bufferImageGranularityConflict)
8790 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8796 if(*pOffset >= suballocItem->offset + suballocItem->size)
8802 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8805 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Total bytes needed starting at the suballocation: padding + alloc + margin.
8807 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8809 if(suballocItem->offset + totalSize > GetSize())
// Consume following suballocations until totalSize is covered, counting
// free bytes and lost-able allocation bytes toward the cost.
8816 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8817 if(totalSize > suballocItem->size)
8819 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8820 while(remainingSize > 0)
8823 if(lastSuballocItem == m_Suballocations.cend())
8827 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8829 *pSumFreeSize += lastSuballocItem->size;
8833 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8834 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8835 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8837 ++*itemsToMakeLostCount;
8838 *pSumItemSize += lastSuballocItem->size;
8845 remainingSize = (lastSuballocItem->size < remainingSize) ?
8846 remainingSize - lastSuballocItem->size : 0;
// Following allocations on the same page with conflicting types must also
// be made lost to satisfy bufferImageGranularity.
8852 if(bufferImageGranularity > 1)
8854 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8856 while(nextSuballocItem != m_Suballocations.cend())
8858 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8859 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8861 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8863 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8864 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8865 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8867 ++*itemsToMakeLostCount;
// ----- Path 2: no allocations may be made lost; suballoc must be free. -----
8886 const VmaSuballocation& suballoc = *suballocItem;
8887 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8889 *pSumFreeSize = suballoc.size;
8892 if(suballoc.size < allocSize)
8898 *pOffset = suballoc.offset;
// Reserve the leading debug margin, then align the start.
8901 if(VMA_DEBUG_MARGIN > 0)
8903 *pOffset += VMA_DEBUG_MARGIN;
8907 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same bufferImageGranularity check against preceding suballocations.
8911 if(bufferImageGranularity > 1)
8913 bool bufferImageGranularityConflict =
false;
8914 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8915 while(prevSuballocItem != m_Suballocations.cbegin())
8918 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8919 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8921 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8923 bufferImageGranularityConflict =
true;
8931 if(bufferImageGranularityConflict)
8933 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8938 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8941 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Everything (padding + allocation + end margin) must fit in this one
// free suballocation on this path.
8944 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A conflicting next allocation on the same page makes this placement fail.
8951 if(bufferImageGranularity > 1)
8953 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8955 while(nextSuballocItem != m_Suballocations.cend())
8957 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8958 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8960 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation at `item` with the (also free) suballocation
// that immediately follows it, absorbing its size and erasing it from the
// list. NOTE(review): the `++nextItem` advance and the m_FreeCount decrement
// (original lines ~8985, ~8990) are elided from this extract.
8979 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8981 VMA_ASSERT(item != m_Suballocations.end());
8982 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8984 VmaSuballocationList::iterator nextItem = item;
8986 VMA_ASSERT(nextItem != m_Suballocations.end());
8987 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
// Grow this range over the next one, then remove the next node.
8989 item->size += nextItem->size;
8991 m_Suballocations.erase(nextItem);
// Converts the given suballocation into a free range, merges it with free
// neighbors (previous and/or next), keeps the size-sorted free registry
// consistent, and returns an iterator to the resulting free suballocation.
// NOTE(review): iterator advances (`++nextItem`, `--prevItem`), the
// m_FreeCount update and some braces are elided from this extract.
8994 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8997 VmaSuballocation& suballoc = *suballocItem;
8998 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8999 suballoc.hAllocation = VK_NULL_HANDLE;
9003 m_SumFreeSize += suballoc.size;
// Decide whether the neighbors are free and should be merged in.
9006 bool mergeWithNext =
false;
9007 bool mergeWithPrev =
false;
9009 VmaSuballocationList::iterator nextItem = suballocItem;
9011 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
9013 mergeWithNext =
true;
9016 VmaSuballocationList::iterator prevItem = suballocItem;
9017 if(suballocItem != m_Suballocations.begin())
9020 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9022 mergeWithPrev =
true;
// Merged neighbors must leave the size-sorted registry before their sizes
// change; the merged survivor is re-registered afterwards.
9028 UnregisterFreeSuballocation(nextItem);
9029 MergeFreeWithNext(suballocItem);
9034 UnregisterFreeSuballocation(prevItem);
9035 MergeFreeWithNext(prevItem);
9036 RegisterFreeSuballocation(prevItem);
9041 RegisterFreeSuballocation(suballocItem);
9042 return suballocItem;
// Adds a free suballocation to the size-sorted registry
// m_FreeSuballocationsBySize, but only if it is large enough to be worth
// tracking (>= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER).
9046 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
9048 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9049 VMA_ASSERT(item->size > 0);
9053 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9055 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9057 if(m_FreeSuballocationsBySize.empty())
9059 m_FreeSuballocationsBySize.push_back(item);
// Otherwise insert at the position that keeps the vector sorted by size.
9063 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from the size-sorted registry. Binary-searches
// to the first entry of equal size, then scans forward through the run of
// equal-sized entries to find the exact iterator; asserts if it is absent.
// NOTE(review): the search-key argument of VmaBinaryFindFirstNotLess and the
// loop increment (original lines ~9085, ~9089) are elided from this extract.
9071 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
9073 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9074 VMA_ASSERT(item->size > 0);
9078 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only registered (large-enough) suballocations appear in the vector.
9080 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9082 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
9083 m_FreeSuballocationsBySize.data(),
9084 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
9086 VmaSuballocationItemSizeLess());
9087 for(
size_t index = it - m_FreeSuballocationsBySize.data();
9088 index < m_FreeSuballocationsBySize.size();
9091 if(m_FreeSuballocationsBySize[index] == item)
9093 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still scanning entries of the same size; anything bigger means missing.
9096 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
9098 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: returns whether moving allocations in
// this block could create a bufferImageGranularity conflict. True if mixed
// conflicting suballocation types exist, or if every allocation's alignment
// already meets/exceeds the granularity. inOutPrevSuballocType carries the
// last non-free type across consecutive blocks.
// NOTE(review): the early return for the trivial case (original lines
// ~9109-9112) and the loop increment are elided from this extract.
9104 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
9105 VkDeviceSize bufferImageGranularity,
9106 VmaSuballocationType& inOutPrevSuballocType)
const
// Granularity of 1 or an empty block can never conflict.
9108 if(bufferImageGranularity == 1 || IsEmpty())
9113 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
9114 bool typeConflictFound =
false;
9115 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
9116 it != m_Suballocations.cend();
9119 const VmaSuballocationType suballocType = it->type;
9120 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
// Track the smallest alignment among live allocations.
9122 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
9123 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
9125 typeConflictFound =
true;
9127 inOutPrevSuballocType = suballocType;
9131 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor: both suballocation vectors use the allocator's callbacks;
// vector 0 starts as the "1st" vector, the 2nd vector begins empty, and all
// null-item (freed-but-not-compacted) counters start at zero.
// NOTE(review): one initializer (original line ~9139, presumably
// m_SumFreeSize) is elided from this extract.
9137 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
9138 VmaBlockMetadata(hAllocator),
9140 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9141 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9142 m_1stVectorIndex(0),
9143 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
9144 m_1stNullItemsBeginCount(0),
9145 m_1stNullItemsMiddleCount(0),
9146 m_2ndNullItemsCount(0)
// Destructor: nothing to do explicitly; member vectors release their storage
// through their VmaStlAllocator.
9150 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
9154 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
9156 VmaBlockMetadata::Init(size);
9157 m_SumFreeSize = size;
// Consistency check for the linear (ring-buffer / double-stack) metadata:
// verifies the relationship between the two suballocation vectors and
// m_2ndVectorMode, the null-item counters, address ordering with debug
// margins, and that m_SumFreeSize matches GetSize() minus used bytes.
// NOTE(review): this extract is elided — null-item counting statements,
// loop increments, braces and the final `return true` (gaps in the original
// numbering) are not visible here.
9160 bool VmaBlockMetadata_Linear::Validate()
const
9162 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9163 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a 2nd-vector mode is active.
9165 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
9166 VMA_VALIDATE(!suballocations1st.empty() ||
9167 suballocations2nd.empty() ||
9168 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
9170 if(!suballocations1st.empty())
// First non-null and last items of the 1st vector must be live.
9173 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
9175 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
9177 if(!suballocations2nd.empty())
9180 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
9183 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
9184 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
9186 VkDeviceSize sumUsedSize = 0;
9187 const size_t suballoc1stCount = suballocations1st.size();
// Running minimum legal offset; advanced past each suballocation + margin.
9188 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// In ring-buffer mode the 2nd vector occupies the lowest addresses.
9190 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9192 const size_t suballoc2ndCount = suballocations2nd.size();
9193 size_t nullItem2ndCount = 0;
9194 for(
size_t i = 0; i < suballoc2ndCount; ++i)
9196 const VmaSuballocation& suballoc = suballocations2nd[i];
9197 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free entries and null handles must coincide.
9199 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9200 VMA_VALIDATE(suballoc.offset >= offset);
9204 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9205 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9206 sumUsedSize += suballoc.size;
9213 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9216 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be free with null handles.
9219 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
9221 const VmaSuballocation& suballoc = suballocations1st[i];
9222 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
9223 suballoc.hAllocation == VK_NULL_HANDLE);
9226 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Validate the remainder of the 1st vector in address order.
9228 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
9230 const VmaSuballocation& suballoc = suballocations1st[i];
9231 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9233 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9234 VMA_VALIDATE(suballoc.offset >= offset);
9235 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
9239 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9240 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9241 sumUsedSize += suballoc.size;
9248 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9250 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// In double-stack mode the 2nd vector grows downward from the block end,
// so it is validated in reverse index order.
9252 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9254 const size_t suballoc2ndCount = suballocations2nd.size();
9255 size_t nullItem2ndCount = 0;
9256 for(
size_t i = suballoc2ndCount; i--; )
9258 const VmaSuballocation& suballoc = suballocations2nd[i];
9259 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9261 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9262 VMA_VALIDATE(suballoc.offset >= offset);
9266 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9267 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9268 sumUsedSize += suballoc.size;
9275 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9278 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9281 VMA_VALIDATE(offset <= GetSize());
// Free bytes must be exactly the block size minus the used bytes found.
9282 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
9287 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const
9289 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9290 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous unused range, computed per
// 2nd-vector mode from the gaps at the edges of the occupied regions.
// NOTE(review): this extract is elided — early returns for trivial cases,
// braces, the VMA_MAX wrapper around the EMPTY-case pair (original lines
// ~9321), and case terminators are not visible here.
9293 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const
9295 const VkDeviceSize size = GetSize();
9307 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9309 switch(m_2ndVectorMode)
// Only the 1st vector is in use: gaps are before the first live item and
// after the last one.
9311 case SECOND_VECTOR_EMPTY:
9317 const size_t suballocations1stCount = suballocations1st.size();
9318 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9319 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9320 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9322 firstSuballoc.offset,
9323 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the free gap lies between the end of the 2nd vector (low
// addresses) and the start of the 1st vector.
9327 case SECOND_VECTOR_RING_BUFFER:
9332 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9333 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9334 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9335 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the free gap lies between the top of the 1st (bottom) stack
// and the lowest item of the 2nd (top) stack.
9339 case SECOND_VECTOR_DOUBLE_STACK:
9344 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9345 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9346 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9347 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with statistics by sweeping the block's address space in
// order: first the 2nd vector (ring-buffer mode), then the 1st vector, then
// the 2nd vector again (double-stack mode, iterated top-down), accounting for
// the unused gaps between consecutive live allocations.
// NOTE(review): this extract is heavily elided — outInfo initialization and
// all per-allocation/per-range accumulation statements (gaps in the original
// numbering, e.g. ~9364-9373, ~9399-9411) are not visible here.
9357 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
9359 const VkDeviceSize size = GetSize();
9360 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9361 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9362 const size_t suballoc1stCount = suballocations1st.size();
9363 const size_t suballoc2ndCount = suballocations2nd.size();
// End offset of the most recently visited allocation during the sweep.
9374 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer 2nd vector, which occupies addresses below the 1st.
9376 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9378 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9379 size_t nextAlloc2ndIndex = 0;
9380 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
9383 while(nextAlloc2ndIndex < suballoc2ndCount &&
9384 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9386 ++nextAlloc2ndIndex;
9390 if(nextAlloc2ndIndex < suballoc2ndCount)
9392 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9395 if(lastOffset < suballoc.offset)
9398 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9412 lastOffset = suballoc.offset + suballoc.size;
9413 ++nextAlloc2ndIndex;
// No more allocations: trailing space up to the 1st vector is unused.
9419 if(lastOffset < freeSpace2ndTo1stEnd)
9421 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9429 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector, up to the block end (or the bottom of the 2nd
// stack in double-stack mode).
9434 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9435 const VkDeviceSize freeSpace1stTo2ndEnd =
9436 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9437 while(lastOffset < freeSpace1stTo2ndEnd)
9440 while(nextAlloc1stIndex < suballoc1stCount &&
9441 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9443 ++nextAlloc1stIndex;
9447 if(nextAlloc1stIndex < suballoc1stCount)
9449 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9452 if(lastOffset < suballoc.offset)
9455 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9469 lastOffset = suballoc.offset + suballoc.size;
9470 ++nextAlloc1stIndex;
9476 if(lastOffset < freeSpace1stTo2ndEnd)
9478 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9486 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack 2nd vector, iterated from its highest index (lowest
// address) downward until the block end.
9490 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9492 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9493 while(lastOffset < size)
9496 while(nextAlloc2ndIndex != SIZE_MAX &&
9497 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9499 --nextAlloc2ndIndex;
9503 if(nextAlloc2ndIndex != SIZE_MAX)
9505 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9508 if(lastOffset < suballoc.offset)
9511 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9525 lastOffset = suballoc.offset + suballoc.size;
9526 --nextAlloc2ndIndex;
9532 if(lastOffset < size)
9534 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's totals into inoutStats with the same three-pass
// address-order sweep as CalcAllocationStatInfo: ring-buffer 2nd vector,
// then 1st vector, then double-stack 2nd vector, counting gaps between live
// allocations as unused ranges.
// NOTE(review): this extract is heavily elided — the statements that add
// allocation/unused-range counts and sizes into inoutStats (gaps in the
// original numbering, e.g. ~9585-9594, ~9605-9610) are not visible here.
9550 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const
9552 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9553 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9554 const VkDeviceSize size = GetSize();
9555 const size_t suballoc1stCount = suballocations1st.size();
9556 const size_t suballoc2ndCount = suballocations2nd.size();
// The whole block's size counts toward the pool's total size.
9558 inoutStats.
size += size;
9560 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer 2nd vector at the low addresses.
9562 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9564 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): starting at m_1stNullItemsBeginCount here (vs. 0 in
// CalcAllocationStatInfo) looks suspicious but is reproduced as-is.
9565 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9566 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
9569 while(nextAlloc2ndIndex < suballoc2ndCount &&
9570 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9572 ++nextAlloc2ndIndex;
9576 if(nextAlloc2ndIndex < suballoc2ndCount)
9578 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9581 if(lastOffset < suballoc.offset)
9584 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9595 lastOffset = suballoc.offset + suballoc.size;
9596 ++nextAlloc2ndIndex;
9601 if(lastOffset < freeSpace2ndTo1stEnd)
9604 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9611 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: the 1st vector up to the block end (or bottom of the 2nd stack).
9616 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9617 const VkDeviceSize freeSpace1stTo2ndEnd =
9618 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9619 while(lastOffset < freeSpace1stTo2ndEnd)
9622 while(nextAlloc1stIndex < suballoc1stCount &&
9623 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9625 ++nextAlloc1stIndex;
9629 if(nextAlloc1stIndex < suballoc1stCount)
9631 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9634 if(lastOffset < suballoc.offset)
9637 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9648 lastOffset = suballoc.offset + suballoc.size;
9649 ++nextAlloc1stIndex;
9654 if(lastOffset < freeSpace1stTo2ndEnd)
9657 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9664 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack 2nd vector, iterated top-down to the block end.
9668 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9670 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9671 while(lastOffset < size)
9674 while(nextAlloc2ndIndex != SIZE_MAX &&
9675 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9677 --nextAlloc2ndIndex;
9681 if(nextAlloc2ndIndex != SIZE_MAX)
9683 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9686 if(lastOffset < suballoc.offset)
9689 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9700 lastOffset = suballoc.offset + suballoc.size;
9701 --nextAlloc2ndIndex;
9706 if(lastOffset < size)
9709 const VkDeviceSize unusedRangeSize = size - lastOffset;
9722 #if VMA_STATS_STRING_ENABLED
// Emits a detailed JSON description of this linear block's layout.
// Two passes over the same address ranges:
//   pass 1 counts allocations / unused ranges and sums used bytes (needed up
//          front for PrintDetailedMap_Begin);
//   pass 2 walks the identical ranges again and emits each allocation and
//          each unused (free) range via the PrintDetailedMap_* helpers.
// Address order covered in each pass:
//   (a) ring-buffer part of suballocations2nd (from offset 0 up to the first
//       live item of suballocations1st),
//   (b) suballocations1st up to either end-of-block or, in double-stack mode,
//       the back of suballocations2nd,
//   (c) in double-stack mode, suballocations2nd iterated from back to front
//       (items there are stored with descending offsets).
// Items with hAllocation == VK_NULL_HANDLE are free placeholders and are
// skipped; gaps between consecutive live items are reported as unused ranges.
9723 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const
9725 const VkDeviceSize size = GetSize();
9726 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9727 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9728 const size_t suballoc1stCount = suballocations1st.size();
9729 const size_t suballoc2ndCount = suballocations2nd.size();
// --- Pass 1: count allocations/unused ranges and accumulate usedBytes. ---
9733 size_t unusedRangeCount = 0;
9734 VkDeviceSize usedBytes = 0;
9736 VkDeviceSize lastOffset = 0;
9738 size_t alloc2ndCount = 0;
// (a) Ring-buffer portion of the 2nd vector: occupies [0, first live 1st item).
9739 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9741 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9742 size_t nextAlloc2ndIndex = 0;
9743 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip free placeholder items (hAllocation == VK_NULL_HANDLE).
9746 while(nextAlloc2ndIndex < suballoc2ndCount &&
9747 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9749 ++nextAlloc2ndIndex;
9753 if(nextAlloc2ndIndex < suballoc2ndCount)
9755 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9758 if(lastOffset < suballoc.offset)
9767 usedBytes += suballoc.size;
9770 lastOffset = suballoc.offset + suballoc.size;
9771 ++nextAlloc2ndIndex;
// Trailing free space before the 1st vector begins.
9776 if(lastOffset < freeSpace2ndTo1stEnd)
9783 lastOffset = freeSpace2ndTo1stEnd;
// (b) The 1st vector, up to end-of-block or the top of the 2nd stack.
9788 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9789 size_t alloc1stCount = 0;
9790 const VkDeviceSize freeSpace1stTo2ndEnd =
9791 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9792 while(lastOffset < freeSpace1stTo2ndEnd)
9795 while(nextAlloc1stIndex < suballoc1stCount &&
9796 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9798 ++nextAlloc1stIndex;
9802 if(nextAlloc1stIndex < suballoc1stCount)
9804 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9807 if(lastOffset < suballoc.offset)
9816 usedBytes += suballoc.size;
9819 lastOffset = suballoc.offset + suballoc.size;
9820 ++nextAlloc1stIndex;
9825 if(lastOffset < size)
9832 lastOffset = freeSpace1stTo2ndEnd;
// (c) Double-stack: 2nd vector stores descending offsets, so iterate backwards.
9836 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9838 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9839 while(lastOffset < size)
// SIZE_MAX is the "walked past the front" sentinel for the reverse index.
9842 while(nextAlloc2ndIndex != SIZE_MAX &&
9843 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9845 --nextAlloc2ndIndex;
9849 if(nextAlloc2ndIndex != SIZE_MAX)
9851 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9854 if(lastOffset < suballoc.offset)
9863 usedBytes += suballoc.size;
9866 lastOffset = suballoc.offset + suballoc.size;
9867 --nextAlloc2ndIndex;
9872 if(lastOffset < size)
// --- Pass 2: emit the JSON, using the totals gathered above. ---
9884 const VkDeviceSize unusedBytes = size - usedBytes;
9885 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// Same three-region walk as pass 1, now writing each item/range to `json`.
9890 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9892 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9893 size_t nextAlloc2ndIndex = 0;
9894 while(lastOffset < freeSpace2ndTo1stEnd)
9897 while(nextAlloc2ndIndex < suballoc2ndCount &&
9898 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9900 ++nextAlloc2ndIndex;
9904 if(nextAlloc2ndIndex < suballoc2ndCount)
9906 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9909 if(lastOffset < suballoc.offset)
9912 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9913 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9918 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9921 lastOffset = suballoc.offset + suballoc.size;
9922 ++nextAlloc2ndIndex;
9927 if(lastOffset < freeSpace2ndTo1stEnd)
9930 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9931 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9935 lastOffset = freeSpace2ndTo1stEnd;
9940 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9941 while(lastOffset < freeSpace1stTo2ndEnd)
9944 while(nextAlloc1stIndex < suballoc1stCount &&
9945 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9947 ++nextAlloc1stIndex;
9951 if(nextAlloc1stIndex < suballoc1stCount)
9953 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9956 if(lastOffset < suballoc.offset)
9959 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9960 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9965 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9968 lastOffset = suballoc.offset + suballoc.size;
9969 ++nextAlloc1stIndex;
9974 if(lastOffset < freeSpace1stTo2ndEnd)
9977 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9978 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9982 lastOffset = freeSpace1stTo2ndEnd;
9986 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9988 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9989 while(lastOffset < size)
9992 while(nextAlloc2ndIndex != SIZE_MAX &&
9993 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9995 --nextAlloc2ndIndex;
9999 if(nextAlloc2ndIndex != SIZE_MAX)
10001 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10004 if(lastOffset < suballoc.offset)
10007 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10008 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10013 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10016 lastOffset = suballoc.offset + suballoc.size;
10017 --nextAlloc2ndIndex;
10022 if(lastOffset < size)
10025 const VkDeviceSize unusedRangeSize = size - lastOffset;
10026 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10035 PrintDetailedMap_End(json);
10037 #endif // #if VMA_STATS_STRING_ENABLED
// Tries to find space for a new allocation in this linear block.
// Thin dispatcher: validates inputs, then forwards all parameters unchanged
// to the upper-address (double-stack top) or lower-address strategy based on
// `upperAddress`. Returns true and fills *pAllocationRequest on success.
10039 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10040 uint32_t currentFrameIndex,
10041 uint32_t frameInUseCount,
10042 VkDeviceSize bufferImageGranularity,
10043 VkDeviceSize allocSize,
10044 VkDeviceSize allocAlignment,
10046 VmaSuballocationType allocType,
10047 bool canMakeOtherLost,
10049 VmaAllocationRequest* pAllocationRequest)
// Preconditions: a real allocation of nonzero size and a valid output pointer.
10051 VMA_ASSERT(allocSize > 0);
10052 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10053 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10054 VMA_HEAVY_ASSERT(Validate());
10055 return upperAddress ?
10056 CreateAllocationRequest_UpperAddress(
10057 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10058 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10059 CreateAllocationRequest_LowerAddress(
10060 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10061 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Upper-address strategy: place the new allocation at the top of the block,
// growing suballocations2nd downward (double-stack usage). Incompatible with
// ring-buffer mode. Offsets are aligned DOWN since we allocate from the end.
// On success fills *pAllocationRequest with type UpperAddress and returns true.
10064 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10065 uint32_t currentFrameIndex,
10066 uint32_t frameInUseCount,
10067 VkDeviceSize bufferImageGranularity,
10068 VkDeviceSize allocSize,
10069 VkDeviceSize allocAlignment,
10070 VmaSuballocationType allocType,
10071 bool canMakeOtherLost,
10073 VmaAllocationRequest* pAllocationRequest)
10075 const VkDeviceSize size = GetSize();
10076 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10077 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// The 2nd vector cannot serve as both ring buffer and upper stack.
10079 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10081 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Request larger than the whole block can never fit.
10086 if(allocSize > size)
// Start just below the end of block, or just below the lowest 2nd-stack item.
10090 VkDeviceSize resultBaseOffset = size - allocSize;
10091 if(!suballocations2nd.empty())
10093 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10094 resultBaseOffset = lastSuballoc.offset - allocSize;
// Not enough room below the existing 2nd-stack items.
10095 if(allocSize > lastSuballoc.offset)
10102 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the allocation (we grow downward).
10105 if(VMA_DEBUG_MARGIN > 0)
10107 if(resultOffset < VMA_DEBUG_MARGIN)
10111 resultOffset -= VMA_DEBUG_MARGIN;
// Align down to the requested alignment.
10115 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// bufferImageGranularity: if a conflicting-type neighbor above us shares a
// granularity page, push the offset further down to a granularity boundary.
10119 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10121 bool bufferImageGranularityConflict =
false;
10122 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10124 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10125 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10127 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
10129 bufferImageGranularityConflict =
true;
10137 if(bufferImageGranularityConflict)
10139 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Check there is free room between the end of the 1st vector and our offset.
10144 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
10145 suballocations1st.back().offset + suballocations1st.back().size :
10147 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also reject a granularity conflict with the topmost 1st-vector item(s).
10151 if(bufferImageGranularity > 1)
10153 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10155 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10156 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10158 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: record the request. Upper-address path never makes others lost.
10172 pAllocationRequest->offset = resultOffset;
10173 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
10174 pAllocationRequest->sumItemSize = 0;
10176 pAllocationRequest->itemsToMakeLostCount = 0;
10177 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Lower-address strategy, two cases:
//  1. Append after the end of suballocations1st (2nd vector empty or used as
//     double stack) -> request type EndOf1st.
//  2. Wrap around as a ring buffer: append after the end of suballocations2nd,
//     before the first live item of suballocations1st, optionally making
//     existing lost-capable allocations lost to gain room -> type EndOf2nd.
// Offsets are aligned UP; bufferImageGranularity conflicts with neighbors on
// the same granularity page force additional alignment. Returns true and
// fills *pAllocationRequest on success.
10184 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
10185 uint32_t currentFrameIndex,
10186 uint32_t frameInUseCount,
10187 VkDeviceSize bufferImageGranularity,
10188 VkDeviceSize allocSize,
10189 VkDeviceSize allocAlignment,
10190 VmaSuballocationType allocType,
10191 bool canMakeOtherLost,
10193 VmaAllocationRequest* pAllocationRequest)
10195 const VkDeviceSize size = GetSize();
10196 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10197 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Case 1: try to append at the end of the 1st vector. ---
10199 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10203 VkDeviceSize resultBaseOffset = 0;
10204 if(!suballocations1st.empty())
10206 const VmaSuballocation& lastSuballoc = suballocations1st.back();
10207 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10211 VkDeviceSize resultOffset = resultBaseOffset;
// Leave the debug margin before the new allocation.
10214 if(VMA_DEBUG_MARGIN > 0)
10216 resultOffset += VMA_DEBUG_MARGIN;
10220 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict with preceding 1st-vector items forces page alignment.
10224 if(bufferImageGranularity > 1 && !suballocations1st.empty())
10226 bool bufferImageGranularityConflict =
false;
10227 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10229 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10230 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10232 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10234 bufferImageGranularityConflict =
true;
10242 if(bufferImageGranularityConflict)
10244 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (double-stack) or at block end.
10248 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
10249 suballocations2nd.back().offset : size;
10252 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Also check for a granularity conflict with 2nd-stack items above us.
10256 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10258 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10260 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10261 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10263 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success (EndOf1st) — nothing needs to be made lost.
10277 pAllocationRequest->offset = resultOffset;
10278 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10279 pAllocationRequest->sumItemSize = 0;
10281 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10282 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Case 2: wrap around, appending at the end of the 2nd vector (ring buffer). ---
10289 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10291 VMA_ASSERT(!suballocations1st.empty());
10293 VkDeviceSize resultBaseOffset = 0;
10294 if(!suballocations2nd.empty())
10296 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10297 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10301 VkDeviceSize resultOffset = resultBaseOffset;
10304 if(VMA_DEBUG_MARGIN > 0)
10306 resultOffset += VMA_DEBUG_MARGIN;
10310 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Granularity conflict with preceding 2nd-vector items.
10314 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10316 bool bufferImageGranularityConflict =
false;
10317 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10319 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10320 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10322 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10324 bufferImageGranularityConflict =
true;
10332 if(bufferImageGranularityConflict)
10334 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10338 pAllocationRequest->itemsToMakeLostCount = 0;
10339 pAllocationRequest->sumItemSize = 0;
10340 size_t index1st = m_1stNullItemsBeginCount;
// Optionally consume 1st-vector allocations that can be made lost to free
// the range [resultOffset, resultOffset + allocSize + margin).
10342 if(canMakeOtherLost)
10344 while(index1st < suballocations1st.size() &&
10345 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10348 const VmaSuballocation& suballoc = suballocations1st[index1st];
10349 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10355 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations unused for more than frameInUseCount frames qualify.
10356 if(suballoc.hAllocation->CanBecomeLost() &&
10357 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10359 ++pAllocationRequest->itemsToMakeLostCount;
10360 pAllocationRequest->sumItemSize += suballoc.size;
// Granularity: following 1st-vector items on the same page may also need
// to be made lost if their type conflicts with ours.
10372 if(bufferImageGranularity > 1)
10374 while(index1st < suballocations1st.size())
10376 const VmaSuballocation& suballoc = suballocations1st[index1st];
10377 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10379 if(suballoc.hAllocation != VK_NULL_HANDLE)
10382 if(suballoc.hAllocation->CanBecomeLost() &&
10383 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10385 ++pAllocationRequest->itemsToMakeLostCount;
10386 pAllocationRequest->sumItemSize += suballoc.size;
// Special case: consumed the entire 1st vector but still overflow the block —
// deliberately unsupported for the ring-buffer-with-lost-allocations path.
10404 if(index1st == suballocations1st.size() &&
10405 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10408 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// The request fits either before block end or before the next surviving 1st item.
10413 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10414 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
// Final granularity check against remaining 1st-vector items above us.
10418 if(bufferImageGranularity > 1)
10420 for(
size_t nextSuballocIndex = index1st;
10421 nextSuballocIndex < suballocations1st.size();
10422 nextSuballocIndex++)
10424 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10425 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10427 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success (EndOf2nd). sumFreeSize excludes the bytes of items to be made lost.
10441 pAllocationRequest->offset = resultOffset;
10442 pAllocationRequest->sumFreeSize =
10443 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10445 - pAllocationRequest->sumItemSize;
10446 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes the allocations counted in pAllocationRequest->itemsToMakeLostCount
// actually lost, walking the 1st vector from its first live item and then
// (in ring-buffer mode) continuing into the 2nd vector. Each item made lost
// becomes a FREE placeholder and its size is returned to m_SumFreeSize.
// Finishes with CleanupAfterFree() to compact/normalize the vectors.
10455 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10456 uint32_t currentFrameIndex,
10457 uint32_t frameInUseCount,
10458 VmaAllocationRequest* pAllocationRequest)
// Nothing to do — trivially successful.
10460 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing items is only meaningful for the empty/ring-buffer configurations.
10465 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10468 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10469 size_t index = m_1stNullItemsBeginCount;
10470 size_t madeLostCount = 0;
10471 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Ran off the end of the 1st vector: continue in the 2nd (ring buffer only).
10473 if(index == suballocations->size())
10477 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10479 suballocations = &AccessSuballocations2nd();
10483 VMA_ASSERT(!suballocations->empty());
10485 VmaSuballocation& suballoc = (*suballocations)[index];
10486 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10488 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10489 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10490 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the item into a free placeholder and update bookkeeping.
10492 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10493 suballoc.hAllocation = VK_NULL_HANDLE;
10494 m_SumFreeSize += suballoc.size;
10495 if(suballocations == &AccessSuballocations1st())
10497 ++m_1stNullItemsMiddleCount;
10501 ++m_2ndNullItemsCount;
10513 CleanupAfterFree();
// Makes lost ALL allocations in this block that can become lost and have not
// been used within the last `frameInUseCount` frames. Scans both vectors,
// converting each lost item into a FREE placeholder, updating null-item
// counters and m_SumFreeSize. Returns the number of allocations made lost;
// runs CleanupAfterFree() if any were.
10519 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10521 uint32_t lostAllocationCount = 0;
// Pass over the 1st vector (skipping the leading null-item prefix).
10523 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10524 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10526 VmaSuballocation& suballoc = suballocations1st[i];
10527 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10528 suballoc.hAllocation->CanBecomeLost() &&
10529 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10531 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10532 suballoc.hAllocation = VK_NULL_HANDLE;
10533 ++m_1stNullItemsMiddleCount;
10534 m_SumFreeSize += suballoc.size;
10535 ++lostAllocationCount;
// Pass over the entire 2nd vector.
10539 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10540 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10542 VmaSuballocation& suballoc = suballocations2nd[i];
10543 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10544 suballoc.hAllocation->CanBecomeLost() &&
10545 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10547 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10548 suballoc.hAllocation = VK_NULL_HANDLE;
10549 ++m_2ndNullItemsCount;
10550 m_SumFreeSize += suballoc.size;
10551 ++lostAllocationCount;
// Compact/normalize vectors only if something actually changed.
10555 if(lostAllocationCount)
10557 CleanupAfterFree();
10560 return lostAllocationCount;
// Validates the debug magic values written VMA_DEBUG_MARGIN bytes before and
// directly after every live allocation in both suballocation vectors.
// pBlockData points at the mapped start of the block's memory.
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin
// (asserting in debug builds); otherwise success.
10563 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10565 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10566 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10568 const VmaSuballocation& suballoc = suballocations1st[i];
10569 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin immediately before the allocation.
10571 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10573 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10574 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin immediately after the allocation.
10576 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10578 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10579 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
10584 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10585 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10587 const VmaSuballocation& suballoc = suballocations2nd[i];
10588 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10590 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10592 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10593 return VK_ERROR_VALIDATION_FAILED_EXT;
10595 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10597 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10598 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request into this block's metadata:
// builds the new VmaSuballocation and appends it to the vector dictated by
// request.type, updating m_2ndVectorMode and m_SumFreeSize accordingly.
//  - UpperAddress: push onto suballocations2nd; switches mode to DOUBLE_STACK.
//  - EndOf1st:     push onto suballocations1st (after its current last item).
//  - EndOf2nd:     push onto suballocations2nd; EMPTY mode becomes RING_BUFFER.
10606 void VmaBlockMetadata_Linear::Alloc(
10607 const VmaAllocationRequest& request,
10608 VmaSuballocationType type,
10609 VkDeviceSize allocSize,
10612 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10614 switch(request.type)
10616 case VmaAllocationRequestType::UpperAddress:
// Upper stack is mutually exclusive with ring-buffer usage.
10618 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10619 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10620 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10621 suballocations2nd.push_back(newSuballoc);
10622 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10625 case VmaAllocationRequestType::EndOf1st:
10627 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// New item must come after the last one and fit inside the block.
10629 VMA_ASSERT(suballocations1st.empty() ||
10630 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10632 VMA_ASSERT(request.offset + allocSize <= GetSize());
10634 suballocations1st.push_back(newSuballoc);
10637 case VmaAllocationRequestType::EndOf2nd:
10639 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Ring-buffer wrap: must end up before the first live 1st-vector item.
10641 VMA_ASSERT(!suballocations1st.empty() &&
10642 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10643 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10645 switch(m_2ndVectorMode)
10647 case SECOND_VECTOR_EMPTY:
// First wrap-around turns the 2nd vector into a ring buffer.
10649 VMA_ASSERT(suballocations2nd.empty());
10650 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10652 case SECOND_VECTOR_RING_BUFFER:
10654 VMA_ASSERT(!suballocations2nd.empty());
10656 case SECOND_VECTOR_DOUBLE_STACK:
10657 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10663 suballocations2nd.push_back(newSuballoc);
10667 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10670 m_SumFreeSize -= newSuballoc.size;
// Frees the given allocation by delegating to FreeAtOffset with its offset.
10673 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10675 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts at `offset`. Fast paths first:
//  - first live item of the 1st vector (advance the null-item prefix),
//  - last item of the 2nd vector (ring buffer / double stack) — pop it,
//  - last item of the 1st vector when the 2nd is empty — pop it.
// Otherwise binary-search both vectors (1st ascending; 2nd ascending for
// ring buffer, descending for double stack) and mark the item as a FREE
// placeholder. Every path returns the size to m_SumFreeSize and runs
// CleanupAfterFree(). Asserts if the offset is not found.
10678 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10680 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10681 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10683 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector.
10686 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10687 if(firstSuballoc.offset == offset)
10689 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10690 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10691 m_SumFreeSize += firstSuballoc.size;
10692 ++m_1stNullItemsBeginCount;
10693 CleanupAfterFree();
// Fast path: freeing the most recently pushed 2nd-vector item.
10699 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10700 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10702 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10703 if(lastSuballoc.offset == offset)
10705 m_SumFreeSize += lastSuballoc.size;
10706 suballocations2nd.pop_back();
10707 CleanupAfterFree();
// Fast path: freeing the last 1st-vector item when there is no 2nd vector.
10712 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10714 VmaSuballocation& lastSuballoc = suballocations1st.back();
10715 if(lastSuballoc.offset == offset)
10717 m_SumFreeSize += lastSuballoc.size;
10718 suballocations1st.pop_back();
10719 CleanupAfterFree();
// Slow path: binary search the 1st vector (sorted by ascending offset).
10726 VmaSuballocation refSuballoc;
10727 refSuballoc.offset = offset;
10729 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10730 suballocations1st.begin() + m_1stNullItemsBeginCount,
10731 suballocations1st.end(),
10733 VmaSuballocationOffsetLess());
10734 if(it != suballocations1st.end())
10736 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10737 it->hAllocation = VK_NULL_HANDLE;
10738 ++m_1stNullItemsMiddleCount;
10739 m_SumFreeSize += it->size;
10740 CleanupAfterFree();
10745 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Slow path: binary search the 2nd vector; sort order depends on its mode.
10748 VmaSuballocation refSuballoc;
10749 refSuballoc.offset = offset;
10751 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10752 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10753 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10754 if(it != suballocations2nd.end())
10756 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10757 it->hAllocation = VK_NULL_HANDLE;
10758 ++m_2ndNullItemsCount;
10759 m_SumFreeSize += it->size;
10760 CleanupAfterFree();
// No fast or slow path matched: caller passed an offset we never allocated.
10765 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic for compacting the 1st vector: worth doing only when it has more
// than 32 items and null (freed) placeholders make up at least 3/5 of it,
// i.e. nullItemCount * 2 >= nonNullCount * 3.
10768 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const
10770 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10771 const size_t suballocCount = AccessSuballocations1st().size();
10772 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes internal vectors after any free/lost operation:
//  - resets everything when the block becomes empty (the clear() branch —
//    presumably guarded by an emptiness check elided from this view);
//  - grows the 1st vector's leading null prefix, trims trailing nulls from
//    both vectors and leading nulls from the 2nd;
//  - fully compacts the 1st vector when ShouldCompact1st() says so;
//  - when the 1st vector is exhausted, promotes the 2nd (ring buffer) to
//    become the new 1st by flipping m_1stVectorIndex.
10775 void VmaBlockMetadata_Linear::CleanupAfterFree()
10777 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10778 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block is empty: drop everything and return to the initial state.
10782 suballocations1st.clear();
10783 suballocations2nd.clear();
10784 m_1stNullItemsBeginCount = 0;
10785 m_1stNullItemsMiddleCount = 0;
10786 m_2ndNullItemsCount = 0;
10787 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10791 const size_t suballoc1stCount = suballocations1st.size();
10792 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10793 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Extend the null prefix over middle nulls that now touch the beginning.
10796 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10797 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10799 ++m_1stNullItemsBeginCount;
10800 --m_1stNullItemsMiddleCount;
// Drop null items from the end of the 1st vector.
10804 while(m_1stNullItemsMiddleCount > 0 &&
10805 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10807 --m_1stNullItemsMiddleCount;
10808 suballocations1st.pop_back();
// Drop null items from the end of the 2nd vector.
10812 while(m_2ndNullItemsCount > 0 &&
10813 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10815 --m_2ndNullItemsCount;
10816 suballocations2nd.pop_back();
// Drop null items from the beginning of the 2nd vector.
10820 while(m_2ndNullItemsCount > 0 &&
10821 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10823 --m_2ndNullItemsCount;
10824 VmaVectorRemove(suballocations2nd, 0);
// Full compaction: slide live items to the front and shrink the vector.
10827 if(ShouldCompact1st())
10829 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10830 size_t srcIndex = m_1stNullItemsBeginCount;
10831 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10833 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10837 if(dstIndex != srcIndex)
10839 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10843 suballocations1st.resize(nonNullItemCount);
10844 m_1stNullItemsBeginCount = 0;
10845 m_1stNullItemsMiddleCount = 0;
// An empty 2nd vector means we are no longer a ring buffer / double stack.
10849 if(suballocations2nd.empty())
10851 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector holds only nulls: clear it and, if the 2nd was a ring buffer,
// swap roles so the 2nd becomes the new 1st (m_1stVectorIndex ^= 1).
10855 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10857 suballocations1st.clear();
10858 m_1stNullItemsBeginCount = 0;
10860 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10863 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10864 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
// Recompute the null prefix for the promoted vector.
10865 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10866 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10868 ++m_1stNullItemsBeginCount;
10869 --m_1stNullItemsMiddleCount;
10871 m_2ndNullItemsCount = 0;
10872 m_1stVectorIndex ^= 1;
10877 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes base metadata, zeroes the allocation counter and
// the per-level free-list heads (m_FreeList). The tree root is created later
// in Init().
10884 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10885 VmaBlockMetadata(hAllocator),
10887 m_AllocationCount(0),
10891 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
10894 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10896 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of `size` bytes.
// Usable size is rounded DOWN to a power of two (the remainder is unusable);
// the level count is derived so the smallest node is >= MIN_NODE_SIZE, and a
// single free root node covering the whole usable range is put on level 0's
// free list.
10899 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10901 VmaBlockMetadata::Init(size);
10903 m_UsableSize = VmaPrevPow2(size);
10904 m_SumFreeSize = m_UsableSize;
// Determine how many levels fit before nodes get smaller than MIN_NODE_SIZE.
10908 while(m_LevelCount < MAX_LEVELS &&
10909 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root: a single free node spanning the entire usable size.
10914 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10915 rootNode->offset = 0;
10916 rootNode->type = Node::TYPE_FREE;
10917 rootNode->parent = VMA_NULL;
10918 rootNode->buddy = VMA_NULL;
10921 AddToFreeListFront(0, rootNode);
// Debug validation of the buddy structure:
//  1. recursively validates the tree (ValidateNode) and cross-checks the
//     allocation count and free-size sums it computed;
//  2. checks each level's free list is well linked (front has no prev, back
//     is the last node, every node is TYPE_FREE, prev/next are consistent);
//  3. levels beyond m_LevelCount must have empty free lists.
10924 bool VmaBlockMetadata_Buddy::Validate()
const
10927 ValidationContext ctx;
10928 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10930 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10932 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10933 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Per-level free-list link consistency.
10936 for(uint32_t level = 0; level < m_LevelCount; ++level)
10938 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10939 m_FreeList[level].front->free.prev == VMA_NULL);
10941 for(Node* node = m_FreeList[level].front;
10943 node = node->free.next)
10945 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10947 if(node->free.next == VMA_NULL)
10949 VMA_VALIDATE(m_FreeList[level].back == node);
10953 VMA_VALIDATE(node->free.next->free.prev == node);
// Unused levels must stay empty.
10959 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10961 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free contiguous range = node size of the shallowest level whose
// free list is non-empty (level 0 nodes are the biggest).
10967 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const
10969 for(uint32_t level = 0; level < m_LevelCount; ++level)
10971 if(m_FreeList[level].front != VMA_NULL)
10973 return LevelToNodeSize(level);
// Fills outInfo with statistics by recursively walking the buddy tree
// (CalcAllocationStatInfoNode); the tail of the block beyond the usable
// power-of-two size (GetUnusableSize) is accounted for separately when > 0.
10979 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const
10981 const VkDeviceSize unusableSize = GetUnusableSize();
10992 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10994 if(unusableSize > 0)
// Accumulates this block's contribution into pool-level statistics:
// total size, and unused bytes = tracked free size plus the unusable tail
// (which is also handled specially when > 0 — details elided in this view).
11003 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const
11005 const VkDeviceSize unusableSize = GetUnusableSize();
11007 inoutStats.
size += GetSize();
11008 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
11013 if(unusableSize > 0)
11020 #if VMA_STATS_STRING_ENABLED
// Emits a JSON map of the buddy block: gathers stats first (needed by
// PrintDetailedMap_Begin), then recursively prints the tree, reports the
// unusable tail as one unused range, and closes the JSON object.
11022 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const
11026 CalcAllocationStatInfo(stat);
11028 PrintDetailedMap_Begin(
11034 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
11036 const VkDeviceSize unusableSize = GetUnusableSize();
11037 if(unusableSize > 0)
11039 PrintDetailedMap_UnusedRange(json,
11044 PrintDetailedMap_End(json);
11047 #endif // #if VMA_STATS_STRING_ENABLED
// Finds a free buddy node for the request. Upper-address allocation is not
// supported by this algorithm. For allocation types whose exact layout is
// unknown, both alignment and size are conservatively rounded up to
// bufferImageGranularity so neighboring nodes can never conflict.
// Searches from the target level upward (larger nodes) for a suitably
// aligned free node; the chosen node's level is smuggled to Alloc() through
// pAllocationRequest->customData.
11049 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
11050 uint32_t currentFrameIndex,
11051 uint32_t frameInUseCount,
11052 VkDeviceSize bufferImageGranularity,
11053 VkDeviceSize allocSize,
11054 VkDeviceSize allocAlignment,
11056 VmaSuballocationType allocType,
11057 bool canMakeOtherLost,
11059 VmaAllocationRequest* pAllocationRequest)
11061 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Unknown-layout types: round up so granularity conflicts are impossible.
11065 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
11066 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
11067 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
11069 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
11070 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
// Cannot exceed the usable (power-of-two) portion of the block.
11073 if(allocSize > m_UsableSize)
// Walk levels from targetLevel toward level 0 (bigger nodes) looking for a
// free node whose offset satisfies the alignment.
11078 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11079 for(uint32_t level = targetLevel + 1; level--; )
11081 for(Node* freeNode = m_FreeList[level].front;
11082 freeNode != VMA_NULL;
11083 freeNode = freeNode->free.next)
11085 if(freeNode->offset % allocAlignment == 0)
11087 pAllocationRequest->type = VmaAllocationRequestType::Normal;
11088 pAllocationRequest->offset = freeNode->offset;
11089 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
11090 pAllocationRequest->sumItemSize = 0;
11091 pAllocationRequest->itemsToMakeLostCount = 0;
// Stash the found level for Alloc() to split from.
11092 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// Lost allocations are not supported by the buddy algorithm: succeed only
// when the request required making nothing lost.
11101 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
11102 uint32_t currentFrameIndex,
11103 uint32_t frameInUseCount,
11104 VmaAllocationRequest* pAllocationRequest)
11110 return pAllocationRequest->itemsToMakeLostCount == 0;
// Buddy algorithm does not support lost allocations; body elided in this
// view — presumably returns 0 unconditionally (TODO confirm against full source).
11113 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: locates the free node chosen by
// CreateAllocationRequest (level comes via request.customData, node by
// matching request.offset on that level's free list), then repeatedly splits
// it into buddy pairs until reaching the target level, finally marking the
// node as TYPE_ALLOCATION and updating counters.
11122 void VmaBlockMetadata_Buddy::Alloc(
11123 const VmaAllocationRequest& request,
11124 VmaSuballocationType type,
11125 VkDeviceSize allocSize,
11128 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
11130 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11131 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node with the requested offset on the recorded level.
11133 Node* currNode = m_FreeList[currLevel].front;
11134 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11135 while(currNode->offset != request.offset)
11137 currNode = currNode->free.next;
11138 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down until the node size matches the target level.
11142 while(currLevel < targetLevel)
// The node being split leaves the free list; its children join it.
11146 RemoveFromFreeList(currLevel, currNode);
11148 const uint32_t childrenLevel = currLevel + 1;
// Create the buddy pair covering the two halves of currNode.
11151 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
11152 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
11154 leftChild->offset = currNode->offset;
11155 leftChild->type = Node::TYPE_FREE;
11156 leftChild->parent = currNode;
11157 leftChild->buddy = rightChild;
11159 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
11160 rightChild->type = Node::TYPE_FREE;
11161 rightChild->parent = currNode;
11162 rightChild->buddy = leftChild;
// Parent becomes a split node that only tracks its left child.
11165 currNode->type = Node::TYPE_SPLIT;
11166 currNode->split.leftChild = leftChild;
// Push right first so the left child ends up at the list front.
11169 AddToFreeListFront(childrenLevel, rightChild);
11170 AddToFreeListFront(childrenLevel, leftChild);
11175 currNode = m_FreeList[currLevel].front;
// We must now be holding a free node of exactly the target size.
11184 VMA_ASSERT(currLevel == targetLevel &&
11185 currNode != VMA_NULL &&
11186 currNode->type == Node::TYPE_FREE);
11187 RemoveFromFreeList(currLevel, currNode);
// Convert the node to an allocation and update block accounting.
11190 currNode->type = Node::TYPE_ALLOCATION;
11191 currNode->allocation.alloc = hAllocation;
11193 ++m_AllocationCount;
11195 m_SumFreeSize -= allocSize;
// Recursively deletes a subtree: for a split node, deletes the right child
// (reached through leftChild->buddy) and the left child, then the node itself.
11198 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
11200 if(node->type == Node::TYPE_SPLIT)
11202 DeleteNode(node->split.leftChild->buddy);
11203 DeleteNode(node->split.leftChild);
11206 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one buddy-tree node and accumulates statistics into
// ctx (free count, allocation count, free-size sum). Checks parent/buddy
// links, then per node type:
//  FREE       — contributes the whole node size to free space;
//  ALLOCATION — contributes (node size - allocation size) as slack;
//  SPLIT      — validates both children recursively; left child shares the
//               parent's offset, right child is offset by half the node size.
11209 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const
11211 VMA_VALIDATE(level < m_LevelCount);
11212 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == NULL) has no buddy; buddies point at each other.
11213 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
11214 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
case Node::TYPE_FREE:
11219 ctx.calculatedSumFreeSize += levelNodeSize;
11220 ++ctx.calculatedFreeCount;
11222 case Node::TYPE_ALLOCATION:
11223 ++ctx.calculatedAllocationCount;
// Internal fragmentation inside the node counts as free space.
11224 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
11225 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
11227 case Node::TYPE_SPLIT:
11229 const uint32_t childrenLevel = level + 1;
11230 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
11231 const Node*
const leftChild = curr->split.leftChild;
11232 VMA_VALIDATE(leftChild != VMA_NULL);
11233 VMA_VALIDATE(leftChild->offset == curr->offset);
11234 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
11236 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
11238 const Node*
const rightChild = leftChild->buddy;
11239 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
11240 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
11242 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest tree level whose node size still
// fits the request (level 0 = whole usable block, each level halves it).
// NOTE(review): the loop body's level increment and the final `return level;`
// were dropped by extraction; code below kept byte-identical.
11253 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const
11256 uint32_t level = 0;
11257 VkDeviceSize currLevelNodeSize = m_UsableSize;
11258 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
// Descend while the allocation still fits in the next, smaller level.
11259 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
11262 currLevelNodeSize = nextLevelNodeSize;
11263 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the tree from the root choosing the
// child containing the offset, marks the leaf free, then iteratively merges
// the node with its buddy while both are free, climbing toward the root.
// NOTE(review): extraction dropped lines here (loop braces, `--level`,
// `node = parent`, free-list insertion of intermediate nodes); kept verbatim.
11268 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
11271 Node* node = m_Root;
11272 VkDeviceSize nodeOffset = 0;
11273 uint32_t level = 0;
11274 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend to the leaf node that covers `offset`.
11275 while(node->type == Node::TYPE_SPLIT)
11277 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11278 if(offset < nodeOffset + nextLevelSize)
11280 node = node->split.leftChild;
11284 node = node->split.leftChild->buddy;
11285 nodeOffset += nextLevelSize;
11288 levelNodeSize = nextLevelSize;
11291 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11292 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11295 --m_AllocationCount;
// NOTE(review): the assert above permits alloc == VK_NULL_HANDLE, yet the next
// line dereferences it — confirm callers never pass a null handle here.
11296 m_SumFreeSize += alloc->GetSize();
11298 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free, collapsing split parents.
11301 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11303 RemoveFromFreeList(level, node->buddy);
11304 Node*
const parent = node->parent;
11306 vma_delete(GetAllocationCallbacks(), node->buddy);
11307 vma_delete(GetAllocationCallbacks(), node);
11308 parent->type = Node::TYPE_FREE;
11316 AddToFreeListFront(level, node);
// Recursively accumulates statistics for one subtree into `outInfo`:
// free nodes count as unused ranges, allocation nodes as used (plus any
// internal-fragmentation tail), split nodes recurse into both children.
// NOTE(review): the `switch`, accumulation statements for the FREE case and
// several braces were dropped by extraction; code kept byte-identical.
11319 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const
11323 case Node::TYPE_FREE:
11329 case Node::TYPE_ALLOCATION:
11331 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Tail of the level-sized node beyond the allocation counts as unused.
11337 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11338 if(unusedRangeSize > 0)
11347 case Node::TYPE_SPLIT:
11349 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11350 const Node*
const leftChild = node->split.leftChild;
11351 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11352 const Node*
const rightChild = leftChild->buddy;
11353 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11361 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11363 VMA_ASSERT(node->type == Node::TYPE_FREE);
11366 Node*
const frontNode = m_FreeList[level].front;
11367 if(frontNode == VMA_NULL)
11369 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11370 node->free.prev = node->free.next = VMA_NULL;
11371 m_FreeList[level].front = m_FreeList[level].back = node;
11375 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11376 node->free.prev = VMA_NULL;
11377 node->free.next = frontNode;
11378 frontNode->free.prev = node;
11379 m_FreeList[level].front = node;
11383 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11385 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11388 if(node->free.prev == VMA_NULL)
11390 VMA_ASSERT(m_FreeList[level].front == node);
11391 m_FreeList[level].front = node->free.next;
11395 Node*
const prevFreeNode = node->free.prev;
11396 VMA_ASSERT(prevFreeNode->free.next == node);
11397 prevFreeNode->free.next = node->free.next;
11401 if(node->free.next == VMA_NULL)
11403 VMA_ASSERT(m_FreeList[level].back == node);
11404 m_FreeList[level].back = node->free.prev;
11408 Node*
const nextFreeNode = node->free.next;
11409 VMA_ASSERT(nextFreeNode->free.prev == node);
11410 nextFreeNode->free.prev = node->free.prev;
11414 #if VMA_STATS_STRING_ENABLED
// Recursively emits this subtree into the JSON detailed map: free nodes as
// unused ranges, allocations as allocation entries (plus any internal
// fragmentation tail), and split nodes by recursing into both children.
// NOTE(review): the `switch` statement, `break`s and braces were dropped by
// extraction; code kept byte-identical.
11415 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const
11419 case Node::TYPE_FREE:
11420 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11422 case Node::TYPE_ALLOCATION:
11424 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11425 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the unused tail when the allocation doesn't fill its node.
11426 if(allocSize < levelNodeSize)
11428 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11432 case Node::TYPE_SPLIT:
11434 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11435 const Node*
const leftChild = node->split.leftChild;
11436 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11437 const Node*
const rightChild = leftChild->buddy;
11438 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11445 #endif // #if VMA_STATS_STRING_ENABLED
// Constructor: leaves the block in an uninitialized state; real setup happens
// in Init(). NOTE(review): some member initializers (original lines 11454,
// 11456 — presumably m_Id and m_MapCount) were dropped by extraction.
11451 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11452 m_pMetadata(VMA_NULL),
11453 m_MemoryTypeIndex(UINT32_MAX),
11455 m_hMemory(VK_NULL_HANDLE),
11457 m_pMappedData(VMA_NULL)
// Initializes the block with freshly allocated VkDeviceMemory and creates the
// metadata object matching the requested suballocation algorithm (linear,
// buddy, or the default generic one).
// NOTE(review): the switch/if dispatch on `algorithm` between the three
// vma_new lines was dropped by extraction; code kept byte-identical.
11461 void VmaDeviceMemoryBlock::Init(
11464 uint32_t newMemoryTypeIndex,
11465 VkDeviceMemory newMemory,
11466 VkDeviceSize newSize,
11468 uint32_t algorithm)
// A block must only be initialized once.
11470 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11472 m_hParentPool = hParentPool;
11473 m_MemoryTypeIndex = newMemoryTypeIndex;
11475 m_hMemory = newMemory;
11480 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11483 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11489 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11491 m_pMetadata->Init(newSize);
11494 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11498 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11500 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11501 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11502 m_hMemory = VK_NULL_HANDLE;
11504 vma_delete(allocator, m_pMetadata);
11505 m_pMetadata = VMA_NULL;
11508 bool VmaDeviceMemoryBlock::Validate()
const
11510 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11511 (m_pMetadata->GetSize() != 0));
11513 return m_pMetadata->Validate();
// Maps the block, asks the metadata to verify corruption-detection magic
// values around every allocation, then unmaps and returns the result.
// NOTE(review): the early `return res;` and final `return res;` lines were
// dropped by extraction; code kept byte-identical.
11516 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11518 void* pData =
nullptr;
11519 VkResult res = Map(hAllocator, 1, &pData);
11520 if(res != VK_SUCCESS)
11525 res = m_pMetadata->CheckCorruption(pData);
11527 Unmap(hAllocator, 1);
// Reference-counted persistent mapping: if the block is already mapped just
// bump m_MapCount and hand out the cached pointer; otherwise call vkMapMemory
// once for the whole block. Thread-safe via m_Mutex when mutexes are enabled.
// NOTE(review): the `count == 0` early return, the remaining vkMapMemory
// arguments, and the final return were dropped by extraction; kept verbatim.
11532 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11539 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11540 if(m_MapCount != 0)
11542 m_MapCount += count;
// Already mapped: reuse the cached whole-block pointer.
11543 VMA_ASSERT(m_pMappedData != VMA_NULL);
11544 if(ppData != VMA_NULL)
11546 *ppData = m_pMappedData;
11552 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11553 hAllocator->m_hDevice,
11559 if(result == VK_SUCCESS)
11561 if(ppData != VMA_NULL)
11563 *ppData = m_pMappedData;
11565 m_MapCount = count;
// Decrements the map reference count by `count`; calls vkUnmapMemory only
// when the count reaches zero. Unbalanced unmaps trigger the assert below.
// NOTE(review): the `count == 0` early return and closing braces were
// dropped by extraction; code kept byte-identical.
11571 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11578 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11579 if(m_MapCount >= count)
11581 m_MapCount -= count;
11582 if(m_MapCount == 0)
11584 m_pMappedData = VMA_NULL;
11585 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11590 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes corruption-detection magic values into the debug margins immediately
// before and after an allocation. Only meaningful when VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION are enabled (enforced by the first assert).
// NOTE(review): the `void* pData` declaration and the return statements were
// dropped by extraction; code kept byte-identical.
11594 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11596 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11597 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11600 VkResult res = Map(hAllocator, 1, &pData);
11601 if(res != VK_SUCCESS)
// Magic value before the allocation and right after its end.
11606 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11607 VmaWriteMagicValue(pData, allocOffset + allocSize);
11609 Unmap(hAllocator, 1);
// Verifies the magic values written by WriteMagicValueAroundAllocation are
// intact before freeing an allocation; asserts with a diagnostic message if
// either margin was overwritten (buffer underrun/overrun by the app).
// NOTE(review): `void* pData` declaration and return statements were dropped
// by extraction; code kept byte-identical.
11614 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11616 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11617 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11620 VkResult res = Map(hAllocator, 1, &pData);
11621 if(res != VK_SUCCESS)
11626 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11628 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11630 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11632 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11635 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset plus an
// optional allocation-local offset, under the block mutex so binding is
// serialized with map/unmap. NOTE(review): the parameter list lines
// (hAllocator, hAllocation, hBuffer, pNext) were dropped by extraction.
11640 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11643 VkDeviceSize allocationLocalOffset,
11647 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11648 hAllocation->GetBlock() ==
this);
11649 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11650 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
// Final offset is relative to the start of the whole VkDeviceMemory.
11651 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11653 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11654 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: binds a VkImage at the allocation's
// absolute offset within this block, serialized by the block mutex.
// NOTE(review): the parameter list lines (hAllocator, hAllocation, hImage,
// pNext) were dropped by extraction; code kept byte-identical.
11657 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11660 VkDeviceSize allocationLocalOffset,
11664 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11665 hAllocation->GetBlock() ==
this);
11666 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11667 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11668 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11670 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11671 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
// NOTE(review): fragment — the enclosing function signature (presumably
// `static void VmaInitStatInfo(VmaStatInfo& outInfo)`) was lost in
// extraction. Zero-initializes the statistics struct.
11676 memset(&outInfo, 0,
sizeof(outInfo));
// Finalizes an accumulated VmaStatInfo (presumably computes averages from
// sums/counts — body was lost in extraction; verify against full source).
11695 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the pool create-info into the member
// VmaBlockVector; a zero blockSize means "use the allocator's preferred
// block size". NOTE(review): several initializer lines (allocator handle,
// bufferImageGranularity, algorithm, m_Id/m_Name) were dropped by extraction.
11703 VmaPool_T::VmaPool_T(
11706 VkDeviceSize preferredBlockSize) :
11710 createInfo.memoryTypeIndex,
11711 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11712 createInfo.minBlockCount,
11713 createInfo.maxBlockCount,
11715 createInfo.frameInUseCount,
// explicitBlockSize flag: true when the user pinned a fixed block size.
11716 createInfo.blockSize != 0,
// Pool destructor. NOTE(review): body lost in extraction — presumably frees
// m_Name; verify against full source.
11723 VmaPool_T::~VmaPool_T()
// Replaces the pool's debug name: frees the previous string and stores a
// fresh heap copy (or, per the elided else branch, resets it when pName is
// null). NOTE(review): the else branch (presumably `m_Name = VMA_NULL;`) was
// dropped by extraction; code kept byte-identical.
11727 void VmaPool_T::SetName(
const char* pName)
11729 const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
11730 VmaFreeString(allocs, m_Name);
11732 if(pName != VMA_NULL)
11734 m_Name = VmaCreateStringCopy(allocs, pName);
11742 #if VMA_STATS_STRING_ENABLED
11744 #endif // #if VMA_STATS_STRING_ENABLED
// Block-vector constructor: stores allocator/pool configuration and starts
// with an empty, mutex-protected list of device memory blocks.
// NOTE(review): the first parameter lines (hAllocator, hParentPool) and a
// trailing initializer (presumably m_NextBlockId(0)) were dropped by
// extraction; code kept byte-identical.
11746 VmaBlockVector::VmaBlockVector(
11749 uint32_t memoryTypeIndex,
11750 VkDeviceSize preferredBlockSize,
11751 size_t minBlockCount,
11752 size_t maxBlockCount,
11753 VkDeviceSize bufferImageGranularity,
11754 uint32_t frameInUseCount,
11755 bool explicitBlockSize,
11756 uint32_t algorithm) :
11757 m_hAllocator(hAllocator),
11758 m_hParentPool(hParentPool),
11759 m_MemoryTypeIndex(memoryTypeIndex),
11760 m_PreferredBlockSize(preferredBlockSize),
11761 m_MinBlockCount(minBlockCount),
11762 m_MaxBlockCount(maxBlockCount),
11763 m_BufferImageGranularity(bufferImageGranularity),
11764 m_FrameInUseCount(frameInUseCount),
11765 m_ExplicitBlockSize(explicitBlockSize),
11766 m_Algorithm(algorithm),
11767 m_HasEmptyBlock(false),
// Block pointers live in a vector using the allocator's own callbacks.
11768 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11773 VmaBlockVector::~VmaBlockVector()
11775 for(
size_t i = m_Blocks.size(); i--; )
11777 m_Blocks[i]->Destroy(m_hAllocator);
11778 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure. NOTE(review): the `return res;` / final `return
// VK_SUCCESS;` lines were dropped by extraction; code kept byte-identical.
11782 VkResult VmaBlockVector::CreateMinBlocks()
11784 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11786 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11787 if(res != VK_SUCCESS)
// Aggregates statistics from every block's metadata into *pStats under a
// shared (read) lock. NOTE(review): the pStats field-initialization lines
// between 11799 and 11808 were dropped by extraction; code kept byte-identical.
11795 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11797 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11799 const size_t blockCount = m_Blocks.size();
11808 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11810 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11811 VMA_ASSERT(pBlock);
11812 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each metadata object adds its own counts/sizes into the shared struct.
11813 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption markers require the debug feature compiled in, a non-zero debug
// margin, and a memory type that is host-visible AND host-coherent (so the
// CPU can write/read the magic values directly).
// NOTE(review): original line 11822 (one more conjunct of this condition) was
// dropped by extraction — verify against full source before editing.
11817 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const
11819 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11820 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11821 (VMA_DEBUG_MARGIN > 0) &&
11823 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11826 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under one write lock; on any failure it
// rolls back by freeing every allocation made so far and zeroing the output
// array, so the call is all-or-nothing.
// NOTE(review): several parameter lines and AllocatePage arguments were
// dropped by extraction; code kept byte-identical.
11828 VkResult VmaBlockVector::Allocate(
11829 uint32_t currentFrameIndex,
11831 VkDeviceSize alignment,
11833 VmaSuballocationType suballocType,
11834 size_t allocationCount,
11838 VkResult res = VK_SUCCESS;
// Round size/alignment up so magic values stay aligned to their 4-byte unit.
11840 if(IsCorruptionDetectionEnabled())
11842 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11843 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11847 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11848 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11850 res = AllocatePage(
11856 pAllocations + allocIndex);
11857 if(res != VK_SUCCESS)
// Rollback: free successfully made allocations in reverse order.
11864 if(res != VK_SUCCESS)
11867 while(allocIndex--)
11869 Free(pAllocations[allocIndex]);
11871 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Core single-allocation routine. Strategy, in order:
//  1. try the last block, then all existing blocks (forward or backward
//     depending on strategy) without making other allocations lost;
//  2. if allowed, create a new block (halving the preferred size up to
//     NEW_BLOCK_SIZE_SHIFT_MAX times when memory is tight) and allocate in it;
//  3. as a last resort, if canMakeOtherLost, repeatedly pick the cheapest
//     "make lost" request across blocks and retry up to VMA_ALLOCATION_TRY_COUNT.
// NOTE(review): very many lines (parameters, condition bodies, AllocateFromBlock
// argument lists, strategy flags) were dropped by extraction; code kept
// byte-identical — consult the full source before modifying.
11877 VkResult VmaBlockVector::AllocatePage(
11878 uint32_t currentFrameIndex,
11880 VkDeviceSize alignment,
11882 VmaSuballocationType suballocType,
11891 VkDeviceSize freeMemory;
11893 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
11895 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
// Default pools may fall back to dedicated allocations; custom pools may not.
11899 const bool canFallbackToDedicated = !IsCustomPool();
11900 const bool canCreateNewBlock =
11902 (m_Blocks.size() < m_MaxBlockCount) &&
11903 (freeMemory >= size || !canFallbackToDedicated);
11910 canMakeOtherLost =
false;
// Upper-address allocation is only supported by the linear algorithm.
11914 if(isUpperAddress &&
11917 return VK_ERROR_FEATURE_NOT_PRESENT;
11931 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request too large to ever fit in a block of the preferred size.
11935 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11937 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11945 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Try the most recently used (last) block first.
11954 if(!m_Blocks.empty())
11956 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11957 VMA_ASSERT(pCurrBlock);
11958 VkResult res = AllocateFromBlock(
11968 if(res == VK_SUCCESS)
11970 VMA_DEBUG_LOG(
" Returned from last block #%u", pCurrBlock->GetId());
// 1b. Scan existing blocks forward (best-fit-ish order).
11980 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11982 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11983 VMA_ASSERT(pCurrBlock);
11984 VkResult res = AllocateFromBlock(
11994 if(res == VK_SUCCESS)
11996 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 1c. Alternative strategy: scan blocks backward.
12004 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12006 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12007 VMA_ASSERT(pCurrBlock);
12008 VkResult res = AllocateFromBlock(
12018 if(res == VK_SUCCESS)
12020 VMA_DEBUG_LOG(
" Returned from existing block #%u", pCurrBlock->GetId());
// 2. Create a new block, shrinking the candidate size while memory is tight.
12028 if(canCreateNewBlock)
12031 VkDeviceSize newBlockSize = m_PreferredBlockSize;
12032 uint32_t newBlockSizeShift = 0;
12033 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
12035 if(!m_ExplicitBlockSize)
// Start from a smaller size when no existing block is that large yet.
12038 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
12039 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
12041 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12042 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
12044 newBlockSize = smallerNewBlockSize;
12045 ++newBlockSizeShift;
12054 size_t newBlockIndex = 0;
12055 VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12056 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Retry creation with progressively halved sizes on failure.
12058 if(!m_ExplicitBlockSize)
12060 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
12062 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
12063 if(smallerNewBlockSize >= size)
12065 newBlockSize = smallerNewBlockSize;
12066 ++newBlockSizeShift;
12067 res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
12068 CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
12077 if(res == VK_SUCCESS)
12079 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
12080 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
12082 res = AllocateFromBlock(
12092 if(res == VK_SUCCESS)
12094 VMA_DEBUG_LOG(
" Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
// Allocation from a brand-new block should never fail; treat as OOM.
12100 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. "Make other allocations lost" path: pick the cheapest candidate request.
12107 if(canMakeOtherLost)
12109 uint32_t tryIndex = 0;
12110 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
12112 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
12113 VmaAllocationRequest bestRequest = {};
12114 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
12120 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
12122 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12123 VMA_ASSERT(pCurrBlock);
12124 VmaAllocationRequest currRequest = {};
12125 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
12128 m_BufferImageGranularity,
// Cost = bytes of other allocations that would have to be made lost.
12137 const VkDeviceSize currRequestCost = currRequest.CalcCost();
12138 if(pBestRequestBlock == VMA_NULL ||
12139 currRequestCost < bestRequestCost)
12141 pBestRequestBlock = pCurrBlock;
12142 bestRequest = currRequest;
12143 bestRequestCost = currRequestCost;
12145 if(bestRequestCost == 0)
12156 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12158 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
12159 VMA_ASSERT(pCurrBlock);
12160 VmaAllocationRequest currRequest = {};
12161 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
12164 m_BufferImageGranularity,
12173 const VkDeviceSize currRequestCost = currRequest.CalcCost();
12174 if(pBestRequestBlock == VMA_NULL ||
12175 currRequestCost < bestRequestCost ||
12178 pBestRequestBlock = pCurrBlock;
12179 bestRequest = currRequest;
12180 bestRequestCost = currRequestCost;
12182 if(bestRequestCost == 0 ||
12192 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations require the block to be mapped up front.
12196 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
12197 if(res != VK_SUCCESS)
12203 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
12209 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12210 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12211 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
12212 UpdateHasEmptyBlock();
12213 (*pAllocation)->InitBlockAllocation(
12215 bestRequest.offset,
12222 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
12223 VMA_DEBUG_LOG(
" Returned from existing block");
12224 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
12225 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
12226 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12228 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12230 if(IsCorruptionDetectionEnabled())
12232 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
12233 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted the retry budget while competing with concurrent "lost" activity.
12248 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
12250 return VK_ERROR_TOO_MANY_OBJECTS;
12254 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back into its block. If the block becomes empty and we
// already have another empty block (or the heap budget is exceeded), the
// block is destroyed — but only outside the mutex, to keep the lock short.
// NOTE(review): the hAllocation parameter line, budget-conditional compile
// guards and several braces were dropped by extraction; kept byte-identical.
12257 void VmaBlockVector::Free(
12260 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
12262 bool budgetExceeded =
false;
12264 const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
12266 m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
12267 budgetExceeded = heapBudget.
usage >= heapBudget.
budget;
// Scope for the write lock; actual VkDeviceMemory release happens after it.
12272 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12274 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12276 if(IsCorruptionDetectionEnabled())
12278 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
12279 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently-mapped allocations hold one map reference on the block.
12282 if(hAllocation->IsPersistentMap())
12284 pBlock->Unmap(m_hAllocator, 1);
12287 pBlock->m_pMetadata->Free(hAllocation);
12288 VMA_HEAVY_ASSERT(pBlock->Validate());
12290 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
12292 const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
// Keep at most one empty block alive as a cache, unless over budget.
12294 if(pBlock->m_pMetadata->IsEmpty())
12297 if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
12299 pBlockToDelete = pBlock;
12306 else if(m_HasEmptyBlock && canDeleteBlock)
12308 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
12309 if(pLastBlock->m_pMetadata->IsEmpty())
12311 pBlockToDelete = pLastBlock;
12312 m_Blocks.pop_back();
12316 UpdateHasEmptyBlock();
12317 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the lock.
12322 if(pBlockToDelete != VMA_NULL)
12324 VMA_DEBUG_LOG(
" Deleted empty block");
12325 pBlockToDelete->Destroy(m_hAllocator);
12326 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, short-circuiting once the
// preferred block size is reached. NOTE(review): the loop `break` and final
// `return result;` were dropped by extraction; code kept byte-identical.
12330 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const
12332 VkDeviceSize result = 0;
12333 for(
size_t i = m_Blocks.size(); i--; )
12335 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
12336 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (first match).
// NOTE(review): the `return;` after removal and the trailing not-found assert
// were dropped by extraction; code kept byte-identical.
12344 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
12346 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12348 if(m_Blocks[blockIndex] == pBlock)
12350 VmaVectorRemove(m_Blocks, blockIndex);
// Performs one bubble-sort pass step so blocks drift toward ascending free
// size over repeated calls (amortized, incremental sorting).
// NOTE(review): the surrounding algorithm guard (lines 12358-12361) and the
// early `return` after the swap were dropped by extraction; kept verbatim.
12357 void VmaBlockVector::IncrementallySortBlocks()
12362 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12364 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12366 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts a non-lost allocation inside one specific block: builds an
// allocation request, maps the block if the allocation is persistently
// mapped, commits the request into metadata, and initializes the returned
// VmaAllocation (user data, budget accounting, debug fill, magic values).
// NOTE(review): several parameter lines, the CreateAllocationRequest
// arguments and braces were dropped by extraction; code kept byte-identical.
12373 VkResult VmaBlockVector::AllocateFromBlock(
12374 VmaDeviceMemoryBlock* pBlock,
12375 uint32_t currentFrameIndex,
12377 VkDeviceSize alignment,
12380 VmaSuballocationType suballocType,
12389 VmaAllocationRequest currRequest = {};
12390 if(pBlock->m_pMetadata->CreateAllocationRequest(
12393 m_BufferImageGranularity,
// This path never competes for space: nothing may need to be made lost.
12403 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12407 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12408 if(res != VK_SUCCESS)
12414 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12415 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12416 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12417 UpdateHasEmptyBlock();
12418 (*pAllocation)->InitBlockAllocation(
12420 currRequest.offset,
12427 VMA_HEAVY_ASSERT(pBlock->Validate());
12428 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12429 m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
12430 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12432 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12434 if(IsCorruptionDetectionEnabled())
12436 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12437 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// No suitable free range in this block.
12441 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a fresh VkDeviceMemory of `blockSize`, wraps it in a new
// VmaDeviceMemoryBlock, appends it to m_Blocks, and reports its index.
// NOTE(review): the failure check after AllocateVulkanMemory and most
// pBlock->Init(...) arguments were dropped by extraction; kept verbatim.
12444 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12446 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12447 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12448 allocInfo.allocationSize = blockSize;
12449 VkDeviceMemory mem = VK_NULL_HANDLE;
12450 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12459 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12465 allocInfo.allocationSize,
12469 m_Blocks.push_back(pBlock);
12470 if(pNewBlockIndex != VMA_NULL)
12472 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes CPU-side defragmentation moves with memcpy: marks which blocks
// participate, maps them (remembering which were mapped here so they can be
// unmapped afterwards), invalidates source ranges / flushes destination
// ranges on non-coherent memory around each copy, then unmaps.
// NOTE(review): braces, the BlockInfo struct body and some statements were
// dropped by extraction; code kept byte-identical.
12478 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12479 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12480 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12482 const size_t blockCount = m_Blocks.size();
12483 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12487 BLOCK_FLAG_USED = 0x00000001,
12488 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12496 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12497 blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12498 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: flag every block that is the source or destination of a move.
12501 const size_t moveCount = moves.size();
12502 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12504 const VmaDefragmentationMove& move = moves[moveIndex];
12505 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12506 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12509 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every participating block is mapped.
12512 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12514 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12515 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12516 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12518 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Map here only if not already persistently mapped; remember to unmap later.
12520 if(currBlockInfo.pMappedData == VMA_NULL)
12522 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12523 if(pDefragCtx->res == VK_SUCCESS)
12525 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the actual copies, with cache maintenance if non-coherent.
12532 if(pDefragCtx->res == VK_SUCCESS)
12534 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12535 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12537 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12539 const VmaDefragmentationMove& move = moves[moveIndex];
12541 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12542 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12544 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range (aligned to nonCoherentAtomSize) before reading.
12549 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12550 memRange.memory = pSrcBlock->GetDeviceMemory();
12551 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12552 memRange.size = VMA_MIN(
12553 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12554 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12555 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12560 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12561 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12562 static_cast<size_t>(move.size));
12564 if(IsCorruptionDetectionEnabled())
12566 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12567 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range after writing (non-coherent memory only).
12573 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12574 memRange.memory = pDstBlock->GetDeviceMemory();
12575 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12576 memRange.size = VMA_MIN(
12577 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12578 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12579 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4 (reverse order): undo only the mappings made in pass 2.
12586 for(
size_t blockIndex = blockCount; blockIndex--; )
12588 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12589 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12591 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12592 pBlock->Unmap(m_hAllocator, 1);
// GPU-side defragmentation: creates a temporary whole-block VkBuffer bound to
// each participating block, records vkCmdCopyBuffer commands for every move
// into `commandBuffer`, and leaves the context in VK_NOT_READY until the
// command buffer has executed and cleanup runs.
// NOTE(review): braces and the VkBufferCopy member initializers were dropped
// by extraction; code kept byte-identical.
12597 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12598 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12599 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12600 VkCommandBuffer commandBuffer)
12602 const size_t blockCount = m_Blocks.size();
12604 pDefragCtx->blockContexts.resize(blockCount);
12605 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: flag blocks that participate in at least one move.
12608 const size_t moveCount = moves.size();
12609 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12611 const VmaDefragmentationMove& move = moves[moveIndex];
12612 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12613 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12616 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create and bind a block-sized transfer buffer per used block.
12620 VkBufferCreateInfo bufCreateInfo;
12621 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12623 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12625 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12626 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12627 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12629 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12630 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12631 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12632 if(pDefragCtx->res == VK_SUCCESS)
12634 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12635 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one copy region per move.
12642 if(pDefragCtx->res == VK_SUCCESS)
12644 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12646 const VmaDefragmentationMove& move = moves[moveIndex];
12648 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12649 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12651 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12653 VkBufferCopy region = {
12657 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12658 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// Completion is asynchronous: signal that the command buffer must execute.
12663 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12665 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): fragment — the enclosing function header (presumably
// `void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats*
// pDefragmentationStats)`) was lost in extraction. Destroys every empty
// block beyond m_MinBlockCount, crediting freed bytes to the stats.
12671 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12673 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12674 if(pBlock->m_pMetadata->IsEmpty())
12676 if(m_Blocks.size() > m_MinBlockCount)
12678 if(pDefragmentationStats != VMA_NULL)
12681 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12684 VmaVectorRemove(m_Blocks, blockIndex);
12685 pBlock->Destroy(m_hAllocator);
12686 vma_delete(m_hAllocator, pBlock);
12694 UpdateHasEmptyBlock();
// Recomputes the cached m_HasEmptyBlock flag by scanning all blocks.
// NOTE(review): the `break` after setting the flag and closing braces were
// dropped by extraction; code kept byte-identical.
12697 void VmaBlockVector::UpdateHasEmptyBlock()
12699 m_HasEmptyBlock =
false;
12700 for(
size_t index = 0, count = m_Blocks.size(); index < count; ++index)
12702 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[index];
12703 if(pBlock->m_pMetadata->IsEmpty())
12705 m_HasEmptyBlock =
true;
12711 #if VMA_STATS_STRING_ENABLED
// Emits this block vector's state as a JSON object: identification, block
// count limits, tuning parameters, and a per-block detailed map keyed by
// block id.
// NOTE(review): upstream wraps lines 12721-12759 in an "is custom pool"
// branch and 12764-12765 in the default-pool else branch; the braces and
// else were lost in extraction -- confirm against upstream VMA.
12713 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
// Read lock: printing only inspects state.
12715 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12717 json.BeginObject();
// Custom-pool path: report the user-assigned pool name when present.
12721 const char* poolName = m_hParentPool->GetName();
12722 if(poolName != VMA_NULL && poolName[0] !=
'\0')
12724 json.WriteString(
"Name");
12725 json.WriteString(poolName);
12728 json.WriteString(
"MemoryTypeIndex");
12729 json.WriteNumber(m_MemoryTypeIndex);
12731 json.WriteString(
"BlockSize");
12732 json.WriteNumber(m_PreferredBlockSize);
// Block-count object carries optional Min/Max plus the current count.
12734 json.WriteString(
"BlockCount");
12735 json.BeginObject(
true);
12736 if(m_MinBlockCount > 0)
12738 json.WriteString(
"Min");
12739 json.WriteNumber((uint64_t)m_MinBlockCount);
12741 if(m_MaxBlockCount < SIZE_MAX)
12743 json.WriteString(
"Max");
12744 json.WriteNumber((uint64_t)m_MaxBlockCount);
12746 json.WriteString(
"Cur");
12747 json.WriteNumber((uint64_t)m_Blocks.size());
12750 if(m_FrameInUseCount > 0)
12752 json.WriteString(
"FrameInUseCount");
12753 json.WriteNumber(m_FrameInUseCount);
12756 if(m_Algorithm != 0)
12758 json.WriteString(
"Algorithm");
12759 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool path: only the preferred block size is reported.
12764 json.WriteString(
"PreferredBlockSize");
12765 json.WriteNumber(m_PreferredBlockSize);
// One entry per block, keyed by the block's numeric id.
12768 json.WriteString(
"Blocks");
12769 json.BeginObject();
12770 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12772 json.BeginString();
12773 json.ContinueString(m_Blocks[i]->GetId());
12776 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
12783 #endif // #if VMA_STATS_STRING_ENABLED
// Runs one defragmentation pass over this block vector.
// Chooses a CPU or GPU strategy from the memory type's property flags and
// the per-path budgets (passed by reference; decremented by the amount
// actually consumed), runs the chosen algorithm, then applies the computed
// moves either via vkCmdCopyBuffer recording (GPU) or memcpy (CPU).
// NOTE(review): the pStats parameter declaration and several brace/else
// lines were lost in extraction -- compare with upstream VMA.
12785 void VmaBlockVector::Defragment(
12786 class VmaBlockVectorDefragmentationContext* pCtx,
12788 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12789 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12790 VkCommandBuffer commandBuffer)
12792 pCtx->res = VK_SUCCESS;
12794 const VkMemoryPropertyFlags memPropFlags =
12795 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12796 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
// CPU path needs budget left (the host-visibility condition on the next
// original line, 12799, was lost in extraction).
12798 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU path needs budget, no corruption-detection margins, and this memory
// type enabled in the GPU-defragmentation mask.
12800 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12801 !IsCorruptionDetectionEnabled() &&
12802 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12805 if(canDefragmentOnCpu || canDefragmentOnGpu)
12807 bool defragmentOnGpu;
// Exactly one path possible: take it.
12809 if(canDefragmentOnGpu != canDefragmentOnCpu)
12811 defragmentOnGpu = canDefragmentOnGpu;
// Both possible: prefer GPU for device-local memory or integrated GPUs.
12816 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12817 m_hAllocator->IsIntegratedGpu();
// GPU copies of overlapping ranges are not safe, so overlap is only
// allowed on the CPU path.
12820 bool overlappingMoveSupported = !defragmentOnGpu;
// Lock is released later by DefragmentationEnd(), not here.
12822 if(m_hAllocator->m_UseMutex)
12824 m_Mutex.LockWrite();
12825 pCtx->mutexLocked =
true;
12828 pCtx->Begin(overlappingMoveSupported);
12832 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12833 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12834 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12835 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))
12836 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge the consumed amounts against the caller's remaining budget.
12839 if(pStats != VMA_NULL)
12841 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12842 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12845 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12846 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12847 if(defragmentOnGpu)
12849 maxGpuBytesToMove -= bytesMoved;
12850 maxGpuAllocationsToMove -= allocationsMoved;
12854 maxCpuBytesToMove -= bytesMoved;
12855 maxCpuAllocationsToMove -= allocationsMoved;
// Apply the planned moves only when the algorithm succeeded.
12859 if(pCtx->res >= VK_SUCCESS)
12861 if(defragmentOnGpu)
12863 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12867 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finalizes a defragmentation pass on this block vector: destroys the
// temporary VkBuffers that were created for GPU copies, frees now-empty
// blocks on success, and releases the write lock taken in Defragment().
// NOTE(review): the pStats parameter declaration line was lost in
// extraction.
12873 void VmaBlockVector::DefragmentationEnd(
12874 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy per-block temporary buffers in reverse creation order.
12878 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12880 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12881 if(blockCtx.hBuffer)
12883 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12884 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12888 if(pCtx->res >= VK_SUCCESS)
12890 FreeEmptyBlocks(pStats);
// Balance the LockWrite() performed in Defragment().
12893 if(pCtx->mutexLocked)
12895 VMA_ASSERT(m_hAllocator->m_UseMutex);
12896 m_Mutex.UnlockWrite();
12900 size_t VmaBlockVector::CalcAllocationCount()
const
12903 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12905 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12910 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const
12912 if(m_BufferImageGranularity == 1)
12916 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12917 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12919 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12920 VMA_ASSERT(m_Algorithm == 0);
12921 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12922 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12930 void VmaBlockVector::MakePoolAllocationsLost(
12931 uint32_t currentFrameIndex,
12932 size_t* pLostAllocationCount)
12934 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12935 size_t lostAllocationCount = 0;
12936 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12938 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12939 VMA_ASSERT(pBlock);
12940 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12942 if(pLostAllocationCount != VMA_NULL)
12944 *pLostAllocationCount = lostAllocationCount;
12948 VkResult VmaBlockVector::CheckCorruption()
12950 if(!IsCorruptionDetectionEnabled())
12952 return VK_ERROR_FEATURE_NOT_PRESENT;
12955 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12956 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12958 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12959 VMA_ASSERT(pBlock);
12960 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12961 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into the global totals,
// the per-memory-type entry, and the per-heap entry of pStats.
// NOTE(review): the local declaration "VmaStatInfo allocationStatInfo;"
// (inside the loop in upstream VMA) was lost in extraction.
12969 void VmaBlockVector::AddStats(
VmaStats* pStats)
12971 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12972 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12974 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12976 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12978 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12979 VMA_ASSERT(pBlock);
12980 VMA_HEAVY_ASSERT(pBlock->Validate());
12982 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
// Each block's stats are folded into all three aggregation levels.
12983 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12984 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12985 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: builds one BlockInfo record per existing block of the
// vector (remembering each block's original index) and sorts the records
// by block pointer so AddAllocation() can locate a block by binary search.
// NOTE(review): the hAllocator parameter declaration and the m_BytesMoved
// initializer line appear lost in extraction -- compare with upstream.
12992 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12994 VmaBlockVector* pBlockVector,
12995 uint32_t currentFrameIndex,
12996 bool overlappingMoveSupported) :
12997 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12998 m_AllocationCount(0),
12999 m_AllAllocations(false),
13001 m_AllocationsMoved(0),
13002 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
13005 const size_t blockCount = m_pBlockVector->m_Blocks.size();
13006 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13008 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
13009 pBlockInfo->m_OriginalBlockIndex = blockIndex;
13010 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
13011 m_Blocks.push_back(pBlockInfo);
// Sorted order is required by the binary search in AddAllocation().
13015 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
13018 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
13020 for(
size_t i = m_Blocks.size(); i--; )
13022 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation for defragmentation, attaching it to the
// BlockInfo record of its owning block (found by binary search over the
// pointer-sorted m_Blocks). Lost allocations are skipped.
// NOTE(review): upstream has an else branch with VMA_ASSERT(0) when the
// owning block is not found; it was lost in extraction.
13026 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13029 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
13031 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
13032 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
13033 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
// pChanged lets the caller learn later whether this allocation moved.
13035 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
13036 (*it)->m_Allocations.push_back(allocInfo);
13043 ++m_AllocationCount;
// One defragmentation round: repeatedly takes the rearmost allocation (last
// block, last allocation) and tries to re-place it at the lowest block /
// offset that can host it, recording each successful relocation in `moves`
// and updating both blocks' metadata immediately. Stops when the byte or
// allocation budget would be exceeded.
// NOTE(review): the enclosing while(true) loop, early returns, and many
// brace/else lines were lost in extraction -- see upstream VMA for the full
// control flow before modifying.
13047 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
13048 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13049 VkDeviceSize maxBytesToMove,
13050 uint32_t maxAllocationsToMove)
13052 if(m_Blocks.empty())
13065 size_t srcBlockMinIndex = 0;
// Source cursor starts past the end of the last block's allocation list;
// the loop below normalizes it to the last valid allocation.
13078 size_t srcBlockIndex = m_Blocks.size() - 1;
13079 size_t srcAllocIndex = SIZE_MAX;
13085 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
13087 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
// Exhausted all candidate source blocks: the round is finished.
13090 if(srcBlockIndex == srcBlockMinIndex)
13097 srcAllocIndex = SIZE_MAX;
13102 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
13106 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
13107 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
13109 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
13110 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
13111 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
13112 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destinations from the front up to (and including) the source block.
13115 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
13117 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
13118 VmaAllocationRequest dstAllocRequest;
13119 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
13120 m_CurrentFrameIndex,
13121 m_pBlockVector->GetFrameInUseCount(),
13122 m_pBlockVector->GetBufferImageGranularity(),
13129 &dstAllocRequest) &&
// MoveMakesSense() rejects placements that would not compact the pool.
13131 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
13133 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check happens before committing the move.
13136 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
13137 (m_BytesMoved + size > maxBytesToMove))
13142 VmaDefragmentationMove move;
13143 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
13144 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
13145 move.srcOffset = srcOffset;
13146 move.dstOffset = dstAllocRequest.offset;
13148 moves.push_back(move);
// Commit: allocate at destination, free at source, repoint allocation.
13150 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
13154 allocInfo.m_hAllocation);
13155 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
13157 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
13159 if(allocInfo.m_pChanged != VMA_NULL)
13161 *allocInfo.m_pChanged = VK_TRUE;
13164 ++m_AllocationsMoved;
13165 m_BytesMoved += size;
13167 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor backwards to the next candidate.
13175 if(srcAllocIndex > 0)
13181 if(srcBlockIndex > 0)
13184 srcAllocIndex = SIZE_MAX;
13194 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const
13197 for(
size_t i = 0; i < m_Blocks.size(); ++i)
13199 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Top-level entry of the generic algorithm: collects the allocation lists
// (all suballocations of every block when m_AllAllocations is set),
// computes per-block movability, sorts blocks so preferred destinations
// come first, then runs up to `roundCount` passes of DefragmentRound().
// NOTE(review): early return for the empty case and brace lines were lost
// in extraction.
13207 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
13208 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13209 VkDeviceSize maxBytesToMove,
13210 uint32_t maxAllocationsToMove)
13212 if(!m_AllAllocations && m_AllocationCount == 0)
13217 const size_t blockCount = m_Blocks.size();
13218 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13220 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// "All allocations" mode: enumerate every used suballocation directly
// from the block metadata instead of relying on AddAllocation() calls.
13222 if(m_AllAllocations)
13224 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
13225 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
13226 it != pMetadata->m_Suballocations.end();
13229 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
13231 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
13232 pBlockInfo->m_Allocations.push_back(allocInfo);
13237 pBlockInfo->CalcHasNonMovableAllocations();
// Allocations are consumed back-to-front by DefragmentRound(), hence the
// descending sort by offset.
13241 pBlockInfo->SortAllocationsByOffsetDescending();
13247 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// A second round can pick up moves enabled by the first round's frees.
13250 const uint32_t roundCount = 2;
13253 VkResult result = VK_SUCCESS;
13254 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
13256 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
13262 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
13263 size_t dstBlockIndex, VkDeviceSize dstOffset,
13264 size_t srcBlockIndex, VkDeviceSize srcOffset)
13266 if(dstBlockIndex < srcBlockIndex)
13270 if(dstBlockIndex > srcBlockIndex)
13274 if(dstOffset < srcOffset)
// Constructor of the fast (single-pass compacting) algorithm. The fast
// path packs suballocations to exact computed offsets, which is only valid
// when no debug margin is inserted between allocations (asserted below).
// NOTE(review): the hAllocator parameter declaration and the m_BytesMoved
// initializer line appear lost in extraction -- compare with upstream.
13284 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
13286 VmaBlockVector* pBlockVector,
13287 uint32_t currentFrameIndex,
13288 bool overlappingMoveSupported) :
13289 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
13290 m_OverlappingMoveSupported(overlappingMoveSupported),
13291 m_AllocationCount(0),
13292 m_AllAllocations(false),
13294 m_AllocationsMoved(0),
13295 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast algorithm is incompatible with corruption-detection margins.
13297 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
13301 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Single-pass compacting defragmentation ("fast" algorithm).
// Blocks are sorted by ascending free space and scanned in that order;
// each used suballocation is packed either into previously discovered free
// space (freeSpaceDb) or appended at the running write cursor (dstOffset)
// of the current destination block. Three placements occur:
//   1. same block, lower offset          -> ChangeOffset()
//   2. earlier block via free-space db   -> ChangeBlockAllocation()
//   3. current destination block         -> ChangeBlockAllocation()
// Metadata is stripped beforehand (PreprocessMetadata) and rebuilt
// afterwards (PostprocessMetadata).
// NOTE(review): brace/else lines, the `bool end = false;` declaration and
// several loop-exit statements were lost in extraction -- see upstream VMA
// for the complete control flow before modifying.
13305 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
13306 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
13307 VkDeviceSize maxBytesToMove,
13308 uint32_t maxAllocationsToMove)
13310 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
13312 const size_t blockCount = m_pBlockVector->GetBlockCount();
13313 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Remove FREE suballocations so only used entries remain in the lists.
13318 PreprocessMetadata();
13322 m_BlockInfos.resize(blockCount);
13323 for(
size_t i = 0; i < blockCount; ++i)
13325 m_BlockInfos[i].origBlockIndex = i;
// Process fuller blocks first (less free space = earlier).
13328 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
13329 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
13330 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Records gaps left behind so later allocations can backfill them.
13335 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: block index (in sorted order) plus write offset.
13337 size_t dstBlockInfoIndex = 0;
13338 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13339 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13340 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13341 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
13342 VkDeviceSize dstOffset = 0;
13345 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
13347 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
13348 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
13349 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
13350 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
13351 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
13353 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
13354 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
13355 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted: stop the whole pass.
13356 if(m_AllocationsMoved == maxAllocationsToMove ||
13357 m_BytesMoved + srcAllocSize > maxBytesToMove)
13362 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Placement 1/2: backfill a previously registered gap if one fits.
13365 size_t freeSpaceInfoIndex;
13366 VkDeviceSize dstAllocOffset;
13367 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
13368 freeSpaceInfoIndex, dstAllocOffset))
13370 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
13371 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
13372 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Gap is in the same block: move within the block (offset change only).
13375 if(freeSpaceInfoIndex == srcBlockInfoIndex)
13377 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13381 VmaSuballocation suballoc = *srcSuballocIt;
13382 suballoc.offset = dstAllocOffset;
13383 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13384 m_BytesMoved += srcAllocSize;
13385 ++m_AllocationsMoved;
13387 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13389 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13390 srcSuballocIt = nextSuballocIt;
13392 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13394 VmaDefragmentationMove move = {
13395 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13396 srcAllocOffset, dstAllocOffset,
13398 moves.push_back(move);
// Gap is in an earlier block: relocate the allocation across blocks.
13405 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13407 VmaSuballocation suballoc = *srcSuballocIt;
13408 suballoc.offset = dstAllocOffset;
13409 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13410 m_BytesMoved += srcAllocSize;
13411 ++m_AllocationsMoved;
13413 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13415 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13416 srcSuballocIt = nextSuballocIt;
13418 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13420 VmaDefragmentationMove move = {
13421 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13422 srcAllocOffset, dstAllocOffset,
13424 moves.push_back(move);
// Placement 3: append at the destination write cursor.
13429 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance to the next destination block while this one cannot fit the
// allocation; the leftover tail is registered as reusable free space.
13432 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13433 dstAllocOffset + srcAllocSize > dstBlockSize)
13436 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13438 ++dstBlockInfoIndex;
13439 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13440 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13441 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13442 dstBlockSize = pDstMetadata->GetSize();
13444 dstAllocOffset = 0;
// Destination caught up with the source block: in-block compaction.
13448 if(dstBlockInfoIndex == srcBlockInfoIndex)
13450 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13452 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13454 bool skipOver = overlap;
13455 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip overlapping moves whose gain (distance moved) is
// tiny relative to the allocation size.
13459 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: leave the allocation in place, register the gap before it.
13464 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13466 dstOffset = srcAllocOffset + srcAllocSize;
// Moved within the same block: only the offset changes.
13472 srcSuballocIt->offset = dstAllocOffset;
13473 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13474 dstOffset = dstAllocOffset + srcAllocSize;
13475 m_BytesMoved += srcAllocSize;
13476 ++m_AllocationsMoved;
13478 VmaDefragmentationMove move = {
13479 srcOrigBlockIndex, dstOrigBlockIndex,
13480 srcAllocOffset, dstAllocOffset,
13482 moves.push_back(move);
// Destination is an earlier block: relocate across blocks, appending to
// the destination's suballocation list (already in ascending offsets).
13490 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13491 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13493 VmaSuballocation suballoc = *srcSuballocIt;
13494 suballoc.offset = dstAllocOffset;
13495 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13496 dstOffset = dstAllocOffset + srcAllocSize;
13497 m_BytesMoved += srcAllocSize;
13498 ++m_AllocationsMoved;
13500 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13502 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13503 srcSuballocIt = nextSuballocIt;
13505 pDstMetadata->m_Suballocations.push_back(suballoc);
13507 VmaDefragmentationMove move = {
13508 srcOrigBlockIndex, dstOrigBlockIndex,
13509 srcAllocOffset, dstAllocOffset,
13511 moves.push_back(move);
13517 m_BlockInfos.clear();
// Rebuild FREE suballocations and free-space accounting.
13519 PostprocessMetadata();
// Strips every FREE suballocation from each block's metadata and resets
// the free-space bookkeeping (m_FreeCount, m_SumFreeSize, size-sorted
// list). Defragment() then re-packs the remaining used entries, and
// PostprocessMetadata() reconstructs the free entries afterwards.
13524 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13526 const size_t blockCount = m_pBlockVector->GetBlockCount();
13527 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13529 VmaBlockMetadata_Generic*
const pMetadata =
13530 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily treat the whole block as free; used sizes are subtracted
// back in PostprocessMetadata().
13531 pMetadata->m_FreeCount = 0;
13532 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13533 pMetadata->m_FreeSuballocationsBySize.clear();
13534 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13535 it != pMetadata->m_Suballocations.end(); )
13537 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Save the successor before erase invalidates the current iterator.
13539 VmaSuballocationList::iterator nextIt = it;
13541 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's FREE suballocations and free-space accounting after
// compaction: inserts a FREE entry into every gap between used entries,
// one trailing FREE entry for the remainder of the block, and re-sorts the
// by-size free list. An empty block becomes a single block-sized FREE entry.
13552 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13554 const size_t blockCount = m_pBlockVector->GetBlockCount();
13555 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13557 VmaBlockMetadata_Generic*
const pMetadata =
13558 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13559 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block ended up completely empty: one FREE entry spans it all.
13562 if(pMetadata->m_Suballocations.empty())
13564 pMetadata->m_FreeCount = 1;
13566 VmaSuballocation suballoc = {
13570 VMA_SUBALLOCATION_TYPE_FREE };
13571 pMetadata->m_Suballocations.push_back(suballoc);
13572 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: walk used entries in offset order, filling gaps.
13577 VkDeviceSize offset = 0;
13578 VmaSuballocationList::iterator it;
13579 for(it = pMetadata->m_Suballocations.begin();
13580 it != pMetadata->m_Suballocations.end();
13583 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13584 VMA_ASSERT(it->offset >= offset);
// Gap before this used entry -> insert a preceding FREE entry.
13587 if(it->offset > offset)
13589 ++pMetadata->m_FreeCount;
13590 const VkDeviceSize freeSize = it->offset - offset;
13591 VmaSuballocation suballoc = {
13595 VMA_SUBALLOCATION_TYPE_FREE };
13596 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13597 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13599 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13603 pMetadata->m_SumFreeSize -= it->size;
13604 offset = it->offset + it->size;
// Remaining tail of the block becomes a trailing FREE entry.
13608 if(offset < blockSize)
13610 ++pMetadata->m_FreeCount;
13611 const VkDeviceSize freeSize = blockSize - offset;
13612 VmaSuballocation suballoc = {
13616 VMA_SUBALLOCATION_TYPE_FREE };
13617 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13618 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): strict '>' here vs '>=' at original line 13597 -- this
// asymmetry looks unintended; confirm against upstream VMA.
13619 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13621 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt)
// Restore the invariant: free list sorted by ascending size.
13626 pMetadata->m_FreeSuballocationsBySize.begin(),
13627 pMetadata->m_FreeSuballocationsBySize.end(),
13628 VmaSuballocationItemSizeLess());
13631 VMA_HEAVY_ASSERT(pMetadata->Validate());
13635 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13638 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13639 while(it != pMetadata->m_Suballocations.end())
13641 if(it->offset < suballoc.offset)
13646 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Stores the target vector, the
// owning custom pool (if any), allocations explicitly registered via
// AddAllocation(), and the algorithm object created lazily in Begin().
// NOTE(review): the hAllocator/hCustomPool parameter declarations and the
// res(VK_SUCCESS) initializer appear lost in extraction -- compare with
// upstream.
13652 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13655 VmaBlockVector* pBlockVector,
13656 uint32_t currFrameIndex) :
13658 mutexLocked(false),
13659 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13660 m_hAllocator(hAllocator),
13661 m_hCustomPool(hCustomPool),
13662 m_pBlockVector(pBlockVector),
13663 m_CurrFrameIndex(currFrameIndex),
13664 m_pAlgorithm(VMA_NULL),
13665 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13666 m_AllAllocations(false)
13670 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13672 vma_delete(m_hAllocator, m_pAlgorithm);
13675 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13677 AllocInfo info = { hAlloc, pChanged };
13678 m_Allocations.push_back(info);
// Instantiates the defragmentation algorithm for this block vector.
// The fast algorithm is used when every allocation participates, there is
// no debug margin, and buffer/image granularity conflicts are impossible;
// otherwise the generic algorithm is used. The chosen algorithm is then
// fed either everything (AddAll) or the explicitly registered allocations.
// NOTE(review): the allAllocations condition in the if at 13696-13698 and
// the AddAll()/else structure were partially lost in extraction.
13681 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13683 const bool allAllocations = m_AllAllocations ||
13684 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13696 if(VMA_DEBUG_MARGIN == 0 &&
13698 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13700 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13701 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13705 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13706 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13711 m_pAlgorithm->AddAll();
// Otherwise forward only the allocations registered via AddAllocation().
13715 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13717 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning all pools. Holds one
// sub-context per participating custom pool (m_CustomPoolContexts) and a
// fixed array of per-memory-type contexts for the default pools
// (m_DefaultPoolContexts), cleared here so entries are created lazily.
// NOTE(review): the hAllocator/flags/pStats parameter declarations and some
// initializers were lost in extraction.
13725 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13727 uint32_t currFrameIndex,
13730 m_hAllocator(hAllocator),
13731 m_CurrFrameIndex(currFrameIndex),
13734 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Null out the per-memory-type slots; AddAllocations() fills them lazily.
13736 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor ends defragmentation on every sub-context (custom pools in
// reverse registration order, then each non-null default-pool slot) and
// deletes the contexts.
13739 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13741 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13743 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
// DefragmentationEnd releases buffers/locks and frees empty blocks.
13744 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13745 vma_delete(m_hAllocator, pBlockVectorCtx);
13747 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13749 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13750 if(pBlockVectorCtx)
13752 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13753 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Only pools using the
// default algorithm (GetAlgorithm() == 0) participate. An existing context
// for the same pool is reused; otherwise one is created and marked to
// process all of the pool's allocations (AddAll).
13758 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13760 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13762 VmaPool pool = pPools[poolIndex];
// Pools with linear/buddy algorithms are not defragmentable.
13765 if(pool->m_BlockVector.GetAlgorithm() == 0)
13767 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Look for a context already created for this pool.
13769 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13771 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13773 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13778 if(!pBlockVectorDefragCtx)
13780 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13783 &pool->m_BlockVector,
13785 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Whole-pool registration processes every allocation in the pool.
13788 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Each eligible
// allocation (block-type, not lost) is dispatched to the context of its
// owning custom pool, or to the per-memory-type default-pool context;
// contexts are created lazily on first use.
// NOTE(review): the pAllocations parameter declaration and the line reading
// the current allocation handle (hAlloc) were lost in extraction.
13793 void VmaDefragmentationContext_T::AddAllocations(
13794 uint32_t allocationCount,
13796 VkBool32* pAllocationsChanged)
13799 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13802 VMA_ASSERT(hAlloc);
// Only block-suballocated, non-lost allocations can be moved.
13804 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13806 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13808 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13810 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
13812 if(hAllocPool != VK_NULL_HANDLE)
// Pools with non-default algorithms do not participate.
13815 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13817 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13819 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13821 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13825 if(!pBlockVectorDefragCtx)
13827 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13830 &hAllocPool->m_BlockVector,
13832 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool, keyed by memory type index.
13839 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13840 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13841 if(!pBlockVectorDefragCtx)
13843 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13846 m_hAllocator->m_pBlockVectors[memTypeIndex],
13848 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13852 if(pBlockVectorDefragCtx)
// Per-allocation "changed" flag is optional and positional.
13854 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13855 &pAllocationsChanged[allocIndex] : VMA_NULL;
13856 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs the actual defragmentation over all registered block-vector contexts:
// first the default pools (one per memory type), then the custom pools.
// Without a command buffer, GPU-side moves are disabled by zeroing the GPU
// limits. Stops early as soon as any context reports an error (res < VK_SUCCESS).
// NOTE(review): some parameters (command buffer, stats pointer) and loop
// increments are elided in this extraction.
13862 VkResult VmaDefragmentationContext_T::Defragment(
13863 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13864 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// No command buffer => GPU defragmentation cannot record copies; disable it.
13872 if(commandBuffer == VK_NULL_HANDLE)
13874 maxGpuBytesToMove = 0;
13875 maxGpuAllocationsToMove = 0;
13878 VkResult res = VK_SUCCESS;
// Pass 1: default pools, indexed by memory type.
13881 for(uint32_t memTypeIndex = 0;
13882 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13885 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13886 if(pBlockVectorCtx)
13888 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13889 pBlockVectorCtx->GetBlockVector()->Defragment(
13892 maxCpuBytesToMove, maxCpuAllocationsToMove,
13893 maxGpuBytesToMove, maxGpuAllocationsToMove,
13895 if(pBlockVectorCtx->res != VK_SUCCESS)
13897 res = pBlockVectorCtx->res;
// Pass 2: custom pools. Every slot here is non-null by construction.
13903 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13904 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13907 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13908 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13909 pBlockVectorCtx->GetBlockVector()->Defragment(
13912 maxCpuBytesToMove, maxCpuAllocationsToMove,
13913 maxGpuBytesToMove, maxGpuAllocationsToMove,
13915 if(pBlockVectorCtx->res != VK_SUCCESS)
13917 res = pBlockVectorCtx->res;
13927 #if VMA_RECORDING_ENABLED
// VmaRecorder: writes a CSV log of allocator calls (recording feature,
// Windows-only: uses QueryPerformanceCounter and fopen_s).
// NOTE(review): this span fuses the default constructor with what appears to
// be an Init-like member (the `return VK_ERROR_INITIALIZATION_FAILED` cannot
// belong to a constructor) — the second function's signature was elided by
// the extraction.
13929 VmaRecorder::VmaRecorder() :
13934 m_StartCounter(INT64_MAX)
13940 m_UseMutex = useMutex;
13941 m_Flags = settings.
flags;
// Capture timer frequency and start time so later records can log elapsed
// seconds relative to recorder creation.
13943 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13944 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13947 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13950 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file identification line plus format version "1,8".
13954 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13955 fprintf(m_File,
"%s\n",
"1,8");
// Destructor: closes the recording file if it was opened.
// NOTE(review): the fclose call itself is elided in this extraction.
13960 VmaRecorder::~VmaRecorder()
13962 if(m_File != VMA_NULL)
// Logs a vmaCreateAllocator call: thread id, timestamp, frame index.
13968 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13970 CallParams callParams;
13971 GetBasicParams(callParams);
// File access is serialized when the recorder was configured with a mutex.
13973 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13974 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Logs a vmaDestroyAllocator call.
13978 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13980 CallParams callParams;
13981 GetBasicParams(callParams);
13983 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13984 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Logs a vmaCreatePool call (pool create-info fields plus the pool handle).
// NOTE(review): the function signature and the fprintf argument list after
// the format string are elided in this extraction.
13990 CallParams callParams;
13991 GetBasicParams(callParams);
13993 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13994 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaDestroyPool call with the pool handle.
14005 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
14007 CallParams callParams;
14008 GetBasicParams(callParams);
14010 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14011 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaAllocateMemory call: memory requirements, allocation create-info
// fields, resulting allocation handle, and the (escaped) user-data string.
// NOTE(review): several fprintf arguments between memoryTypeBits and the
// user-data string are elided in this extraction.
14016 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
14017 const VkMemoryRequirements& vkMemReq,
14021 CallParams callParams;
14022 GetBasicParams(callParams);
14024 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString formats pUserData either as a string or a pointer,
// depending on the allocation flags.
14025 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
14026 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14028 vkMemReq.alignment,
14029 vkMemReq.memoryTypeBits,
14037 userDataStr.GetString());
// Logs a vmaAllocateMemoryPages call. The list of resulting allocation
// handles is written space-separated via PrintPointerList, followed by the
// user-data string on the same CSV record.
14041 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
14042 const VkMemoryRequirements& vkMemReq,
14044 uint64_t allocationCount,
14047 CallParams callParams;
14048 GetBasicParams(callParams);
14050 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14051 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
14052 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
14054 vkMemReq.alignment,
14055 vkMemReq.memoryTypeBits,
14062 PrintPointerList(allocationCount, pAllocations);
14063 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Logs a vmaAllocateMemoryForBuffer call, including whether the driver
// requires/prefers a dedicated allocation for this buffer.
14067 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
14068 const VkMemoryRequirements& vkMemReq,
14069 bool requiresDedicatedAllocation,
14070 bool prefersDedicatedAllocation,
14074 CallParams callParams;
14075 GetBasicParams(callParams);
14077 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14078 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
14079 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14081 vkMemReq.alignment,
14082 vkMemReq.memoryTypeBits,
// Booleans are recorded as 0/1 to keep the CSV numeric.
14083 requiresDedicatedAllocation ? 1 : 0,
14084 prefersDedicatedAllocation ? 1 : 0,
14092 userDataStr.GetString());
// Logs a vmaAllocateMemoryForImage call; same layout as the buffer variant.
14096 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
14097 const VkMemoryRequirements& vkMemReq,
14098 bool requiresDedicatedAllocation,
14099 bool prefersDedicatedAllocation,
14103 CallParams callParams;
14104 GetBasicParams(callParams);
14106 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14107 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
14108 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14110 vkMemReq.alignment,
14111 vkMemReq.memoryTypeBits,
14112 requiresDedicatedAllocation ? 1 : 0,
14113 prefersDedicatedAllocation ? 1 : 0,
14121 userDataStr.GetString());
// Logs a vmaFreeMemory call with the allocation handle.
14125 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
14128 CallParams callParams;
14129 GetBasicParams(callParams);
14131 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14132 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaFreeMemoryPages call with the space-separated list of handles.
14137 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
14138 uint64_t allocationCount,
14141 CallParams callParams;
14142 GetBasicParams(callParams);
14144 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14145 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
14146 PrintPointerList(allocationCount, pAllocations);
14147 fprintf(m_File,
"\n");
// Logs a vmaSetAllocationUserData call with the allocation handle and the
// (string- or pointer-formatted) user data.
14151 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
14153 const void* pUserData)
14155 CallParams callParams;
14156 GetBasicParams(callParams);
14158 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// NOTE(review): UserDataString constructor arguments are elided here.
14159 UserDataString userDataStr(
14162 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14164 userDataStr.GetString());
// Logs a vmaCreateLostAllocation call with the resulting handle.
14168 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
14171 CallParams callParams;
14172 GetBasicParams(callParams);
14174 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14175 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaMapMemory call with the allocation handle.
14180 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
14183 CallParams callParams;
14184 GetBasicParams(callParams);
14186 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14187 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaUnmapMemory call with the allocation handle.
14192 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
14195 CallParams callParams;
14196 GetBasicParams(callParams);
14198 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14199 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaFlushAllocation call: handle, offset and size of the flushed range.
14204 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
14205 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
14207 CallParams callParams;
14208 GetBasicParams(callParams);
14210 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14211 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaInvalidateAllocation call: handle, offset and size of the range.
14218 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
14219 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
14221 CallParams callParams;
14222 GetBasicParams(callParams);
14224 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14225 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaCreateBuffer call: buffer create-info fields, allocation
// create-info fields, resulting handles, and the user-data string.
14232 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
14233 const VkBufferCreateInfo& bufCreateInfo,
14237 CallParams callParams;
14238 GetBasicParams(callParams);
14240 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14241 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
14242 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14243 bufCreateInfo.flags,
14244 bufCreateInfo.size,
14245 bufCreateInfo.usage,
14246 bufCreateInfo.sharingMode,
14247 allocCreateInfo.
flags,
14248 allocCreateInfo.
usage,
14252 allocCreateInfo.
pool,
14254 userDataStr.GetString());
// Logs a vmaCreateImage call: the full image create-info (type, format,
// extent, mips, layers, samples, tiling, usage, sharing, layout), the
// allocation create-info, resulting handles, and the user-data string.
14258 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
14259 const VkImageCreateInfo& imageCreateInfo,
14263 CallParams callParams;
14264 GetBasicParams(callParams);
14266 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14267 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
14268 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14269 imageCreateInfo.flags,
14270 imageCreateInfo.imageType,
14271 imageCreateInfo.format,
14272 imageCreateInfo.extent.width,
14273 imageCreateInfo.extent.height,
14274 imageCreateInfo.extent.depth,
14275 imageCreateInfo.mipLevels,
14276 imageCreateInfo.arrayLayers,
14277 imageCreateInfo.samples,
14278 imageCreateInfo.tiling,
14279 imageCreateInfo.usage,
14280 imageCreateInfo.sharingMode,
14281 imageCreateInfo.initialLayout,
14282 allocCreateInfo.
flags,
14283 allocCreateInfo.
usage,
14287 allocCreateInfo.
pool,
14289 userDataStr.GetString());
// Logs a vmaDestroyBuffer call with the allocation handle.
14293 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
14296 CallParams callParams;
14297 GetBasicParams(callParams);
14299 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14300 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaDestroyImage call with the allocation handle.
14305 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
14308 CallParams callParams;
14309 GetBasicParams(callParams);
14311 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14312 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaTouchAllocation call with the allocation handle.
14317 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
14320 CallParams callParams;
14321 GetBasicParams(callParams);
14323 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14324 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaGetAllocationInfo call with the allocation handle.
14329 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
14332 CallParams callParams;
14333 GetBasicParams(callParams);
14335 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14336 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaMakePoolAllocationsLost call with the pool handle.
14341 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
14344 CallParams callParams;
14345 GetBasicParams(callParams);
14347 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14348 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaDefragmentationBegin call. The record is emitted in pieces:
// header, then pointer list(s), then the numeric limits and context handle.
// NOTE(review): the PrintPointerList calls between the fprintf statements
// are elided in this extraction.
14353 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
14357 CallParams callParams;
14358 GetBasicParams(callParams);
14360 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14361 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14364 fprintf(m_File,
",");
14366 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Logs a vmaDefragmentationEnd call with the defragmentation context handle.
14376 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14379 CallParams callParams;
14380 GetBasicParams(callParams);
14382 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14383 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaSetPoolName call; a null name is recorded as an empty string.
14388 void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
14392 CallParams callParams;
14393 GetBasicParams(callParams);
14395 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14396 fprintf(m_File,
"%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
14397 pool, name != VMA_NULL ? name :
"");
// Fragment of the UserDataString constructor (signature elided by the
// extraction): when user data is present it is used either directly as a
// C string, or formatted as a pointer value into the small local buffer.
// NOTE(review): presumably the string branch is taken when the allocation
// used VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT — the flag test is
// among the elided lines; confirm against the full source.
14403 if(pUserData != VMA_NULL)
14407 m_Str = (
const char*)pUserData;
14411 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config" section at the top of the recording file: Vulkan API
// version, physical-device identity and limits, the full memory heap/type
// layout, which extensions were enabled, and the compile-time VMA debug
// macro values. This lets a replay tool validate it runs in a compatible
// environment.
14421 void VmaRecorder::WriteConfiguration(
14422 const VkPhysicalDeviceProperties& devProps,
14423 const VkPhysicalDeviceMemoryProperties& memProps,
14424 uint32_t vulkanApiVersion,
14425 bool dedicatedAllocationExtensionEnabled,
14426 bool bindMemory2ExtensionEnabled,
14427 bool memoryBudgetExtensionEnabled)
14429 fprintf(m_File,
"Config,Begin\n");
14431 fprintf(m_File,
"VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));
// Physical-device identity.
14433 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14434 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14435 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14436 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14437 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14438 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that affect allocation behavior.
14440 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14441 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14442 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps and types.
14444 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14445 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14447 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14448 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
14450 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14451 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14453 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14454 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Enabled extensions, recorded as 0/1.
14457 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
14458 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
14459 fprintf(m_File,
"Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
// Compile-time configuration macros baked into this build.
14461 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14462 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14463 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14464 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14465 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14466 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14467 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14468 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14469 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14471 fprintf(m_File,
"Config,End\n");
14474 void VmaRecorder::GetBasicParams(CallParams& outParams)
14476 outParams.threadId = GetCurrentThreadId();
14478 LARGE_INTEGER counter;
14479 QueryPerformanceCounter(&counter);
14480 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
14483 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14487 fprintf(m_File,
"%p", pItems[0]);
14488 for(uint64_t i = 1; i < count; ++i)
14490 fprintf(m_File,
" %p", pItems[i]);
14495 void VmaRecorder::Flush()
14503 #endif // #if VMA_RECORDING_ENABLED
// Pool allocator for VmaAllocation_T objects: 1024 items per internal block,
// using the caller-provided Vulkan allocation callbacks.
14508 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14509 m_Allocator(pAllocationCallbacks, 1024)
// Body of VmaAllocationObjectAllocator::Allocate (signature elided by the
// extraction): thread-safe allocation from the internal pool.
14515 VmaMutexLock mutexLock(m_Mutex);
14516 return m_Allocator.Alloc();
// Returns an allocation object to the pool, serialized by the same mutex
// that guards Allocate().
14519 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14521 VmaMutexLock mutexLock(m_Mutex);
14522 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (its signature line is elided by the
// extraction). Initializer list, then body: validates preprocessor/runtime
// configuration, zero-initializes member tables, queries device properties,
// applies optional per-heap size limits, creates one default block vector
// and dedicated-allocation list per memory type, and optionally initializes
// the call recorder.
14530 m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
14534 m_hDevice(pCreateInfo->device),
14535 m_hInstance(pCreateInfo->instance),
14536 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14537 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14538 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14539 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14540 m_HeapSizeLimitMask(0),
14541 m_PreferredLargeHeapBlockSize(0),
14542 m_PhysicalDevice(pCreateInfo->physicalDevice),
14543 m_CurrentFrameIndex(0),
14544 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14545 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14548 ,m_pRecorder(VMA_NULL)
// On Vulkan >= 1.1 the KHR extensions are core; the extension flags are
// redundant and cleared.
14551 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14553 m_UseKhrDedicatedAllocation =
false;
14554 m_UseKhrBindMemory2 =
false;
// Corruption detection writes uint32 magic values inside the debug margin,
// so the margin must be uint32-aligned.
14557 if(VMA_DEBUG_DETECT_CORRUPTION)
14560 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Runtime flags must not request features that were compiled out.
14565 if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
14567 #if !(VMA_DEDICATED_ALLOCATION)
14570 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14573 #if !(VMA_BIND_MEMORY2)
14576 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
14580 #if !(VMA_MEMORY_BUDGET)
14583 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
14586 #if VMA_VULKAN_VERSION < 1001000
14587 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14589 VMA_ASSERT(0 &&
"vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
// Zero all member tables before they are selectively filled in.
14593 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14594 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14595 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14597 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14598 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
14599 memset(&m_VulkanFunctions, 0,
sizeof(m_VulkanFunctions));
// Query device properties and the memory heap/type layout.
14609 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14610 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14612 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14613 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14614 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14615 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Optional caller-imposed per-heap size limits: clamp the reported heap
// size so internal budgeting respects them.
14622 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14624 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14625 if(limit != VK_WHOLE_SIZE)
14627 m_HeapSizeLimitMask |= 1u << heapIndex;
14628 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14630 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector plus one dedicated-allocation list per memory type.
14636 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14638 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14640 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14644 preferredBlockSize,
14647 GetBufferImageGranularity(),
14653 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14660 VkResult res = VK_SUCCESS;
// Optional call recording: requires VMA_RECORDING_ENABLED, otherwise the
// request is rejected with VK_ERROR_FEATURE_NOT_PRESENT.
14665 #if VMA_RECORDING_ENABLED
14666 m_pRecorder = vma_new(
this, VmaRecorder)();
14668 if(res != VK_SUCCESS)
14672 m_pRecorder->WriteConfiguration(
14673 m_PhysicalDeviceProperties,
14675 m_VulkanApiVersion,
14676 m_UseKhrDedicatedAllocation,
14677 m_UseKhrBindMemory2,
14678 m_UseExtMemoryBudget);
14679 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14681 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14682 return VK_ERROR_FEATURE_NOT_PRESENT;
14686 #if VMA_MEMORY_BUDGET
14687 if(m_UseExtMemoryBudget)
14689 UpdateVulkanBudget();
14691 #endif // #if VMA_MEMORY_BUDGET
// Destructor: records destruction and deletes the recorder (when recording
// is enabled), verifies no custom pools or dedicated allocations were
// leaked, then deletes the per-memory-type vectors in reverse order.
14696 VmaAllocator_T::~VmaAllocator_T()
14698 #if VMA_RECORDING_ENABLED
14699 if(m_pRecorder != VMA_NULL)
14701 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14702 vma_delete(
this, m_pRecorder);
14706 VMA_ASSERT(m_Pools.empty());
14708 for(
size_t i = GetMemoryTypeCount(); i--; )
14710 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14712 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14715 vma_delete(
this, m_pDedicatedAllocations[i]);
14716 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions in three layers:
// 1. When statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//    global Vulkan entry points, resolving promoted/extension functions via
//    vkGetDeviceProcAddr / vkGetInstanceProcAddr as needed.
// 2. Overwrite any entry the caller supplied explicitly.
// Pointers left null here are caught by the assert-validation code that
// follows this function.
14720 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14722 #if VMA_STATIC_VULKAN_FUNCTIONS == 1
14723 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14724 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14725 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14726 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14727 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14728 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14729 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14730 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14731 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14732 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14733 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14734 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14735 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14736 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14737 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14738 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14739 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Vulkan 1.1: the formerly-KHR functions are core; fetch them by their
// core (unsuffixed) names.
14740 #if VMA_VULKAN_VERSION >= 1001000
14741 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14743 VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
14744 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14745 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2");
14746 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14747 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2");
14748 m_VulkanFunctions.vkBindBufferMemory2KHR =
14749 (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindBufferMemory2");
14750 m_VulkanFunctions.vkBindImageMemory2KHR =
14751 (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindImageMemory2");
14752 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
14753 (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance,
"vkGetPhysicalDeviceMemoryProperties2");
// Vulkan 1.0 with extensions: fetch the KHR-suffixed entry points.
14756 #if VMA_DEDICATED_ALLOCATION
14757 if(m_UseKhrDedicatedAllocation)
14759 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14760 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14761 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14762 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14765 #if VMA_BIND_MEMORY2
14766 if(m_UseKhrBindMemory2)
14768 m_VulkanFunctions.vkBindBufferMemory2KHR =
14769 (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindBufferMemory2KHR");
14770 m_VulkanFunctions.vkBindImageMemory2KHR =
14771 (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindImageMemory2KHR");
14773 #endif // #if VMA_BIND_MEMORY2
14774 #if VMA_MEMORY_BUDGET
14775 if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
14777 VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
14778 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
14779 (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance,
"vkGetPhysicalDeviceMemoryProperties2KHR");
14781 #endif // #if VMA_MEMORY_BUDGET
14782 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
// Caller-supplied pointers always take precedence over the statically
// resolved ones.
14784 #define VMA_COPY_IF_NOT_NULL(funcName) \
14785 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;
14787 if(pVulkanFunctions != VMA_NULL)
14789 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14790 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14791 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14792 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14793 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14794 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14795 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14796 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14797 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14798 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14799 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14800 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14801 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14802 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14803 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14804 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14805 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14806 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
14807 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14808 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14810 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
14811 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
14812 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
14814 #if VMA_MEMORY_BUDGET
14815 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
14819 #undef VMA_COPY_IF_NOT_NULL
// Asserts that every required Vulkan function pointer was resolved — the
// core set unconditionally, and the promoted/extension functions only when
// the corresponding feature is in use. NOTE(review): the enclosing
// function's signature line is elided by the extraction.
14823 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14824 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14825 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14826 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14827 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14828 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14829 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14830 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14831 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14832 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14833 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14834 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14835 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14836 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14837 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14838 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14839 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
// Extension/promoted functions are required only when actually used.
14840 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
14841 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
14843 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14844 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14847 #if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
14848 if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
14850 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14851 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
14854 #if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
14855 if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
14857 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
14862 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14864 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14865 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14866 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14867 return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
// Allocates memory of a specific memory type: decides between a dedicated
// VkDeviceMemory allocation and a suballocation from the type's block
// vector. Dedicated memory is preferred when forced by debug macro, when the
// driver requires/prefers it, or when the request exceeds half the preferred
// block size; block allocation is tried first otherwise, with dedicated
// memory as the fallback. NOTE(review): several parameters and argument
// lists are elided by the extraction.
14870 VkResult VmaAllocator_T::AllocateMemoryOfType(
14872 VkDeviceSize alignment,
14873 bool dedicatedAllocation,
14874 VkBuffer dedicatedBuffer,
14875 VkImage dedicatedImage,
14877 uint32_t memTypeIndex,
14878 VmaSuballocationType suballocType,
14879 size_t allocationCount,
14882 VMA_ASSERT(pAllocations != VMA_NULL);
14883 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Host-access-related create flags are dropped for non-HOST_VISIBLE types
// (the flag-adjustment line is elided here).
14889 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14899 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14900 VMA_ASSERT(blockVector);
14902 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests larger than half a block go straight to dedicated
// memory to avoid fragmenting block space.
14903 bool preferDedicatedMemory =
14904 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14905 dedicatedAllocation ||
14907 size > preferredBlockSize / 2;
// Dedicated path only applies to the default pools (custom pools manage
// their own blocks).
14909 if(preferDedicatedMemory &&
14911 finalCreateInfo.
pool == VK_NULL_HANDLE)
14920 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14924 return AllocateDedicatedMemory(
// Otherwise: try suballocating from the block vector first.
14940 VkResult res = blockVector->Allocate(
14941 m_CurrentFrameIndex.load(),
14948 if(res == VK_SUCCESS)
14956 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed: fall back to a dedicated allocation.
14960 res = AllocateDedicatedMemory(
14967 finalCreateInfo.pUserData,
14972 if(res == VK_SUCCESS)
14975 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14981 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory allocations of the given type.
// Checks the heap budget first, builds the VkMemoryAllocateInfo (chaining
// VkMemoryDedicatedAllocateInfoKHR when the extension / Vulkan 1.1 is in use),
// allocates page by page, and registers successes in m_pDedicatedAllocations.
// On partial failure it rolls back: frees already-allocated pages and zeroes the
// output array.
14988 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14990 VmaSuballocationType suballocType,
14991 uint32_t memTypeIndex,
14994 bool isUserDataString,
14996 VkBuffer dedicatedBuffer,
14997 VkImage dedicatedImage,
14998 size_t allocationCount,
15001 VMA_ASSERT(allocationCount > 0 && pAllocations);
15005 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15007 GetBudget(&heapBudget, heapIndex, 1);
// Refuse up front if the whole batch would exceed the heap's budget.
15008 if(heapBudget.
usage + size * allocationCount > heapBudget.
budget)
15010 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15014 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
15015 allocInfo.memoryTypeIndex = memTypeIndex;
15016 allocInfo.allocationSize = size;
15018 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
15019 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
15020 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15022 if(dedicatedBuffer != VK_NULL_HANDLE)
// Dedicated allocation may target a buffer or an image, never both.
15024 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
15025 dedicatedAllocInfo.buffer = dedicatedBuffer;
15026 allocInfo.pNext = &dedicatedAllocInfo;
15028 else if(dedicatedImage != VK_NULL_HANDLE)
15030 dedicatedAllocInfo.image = dedicatedImage;
15031 allocInfo.pNext = &dedicatedAllocInfo;
15034 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
15037 VkResult res = VK_SUCCESS;
15038 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15040 res = AllocateDedicatedMemoryPage(
15048 pAllocations + allocIndex);
15049 if(res != VK_SUCCESS)
15055 if(res == VK_SUCCESS)
// Register all new allocations in the per-type sorted dedicated-allocation list.
15059 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15060 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15061 VMA_ASSERT(pDedicatedAllocations);
15062 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
15064 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
15068 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure path: free every page allocated so far, in reverse order.
15073 while(allocIndex--)
15076 VkDeviceMemory hMemory = currAlloc->GetMemory();
15088 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
15089 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
15090 currAlloc->SetUserData(
this, VMA_NULL);
15092 m_AllocationObjectAllocator.Free(currAlloc);
15095 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates one dedicated VkDeviceMemory page: calls AllocateVulkanMemory, optionally
// maps it persistently, constructs the VmaAllocation_T object, records the budget,
// and fills the allocation with a debug pattern when VMA_DEBUG_INITIALIZE_ALLOCATIONS.
// On vkMapMemory failure the freshly allocated memory is released again.
15101 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
15103 VmaSuballocationType suballocType,
15104 uint32_t memTypeIndex,
15105 const VkMemoryAllocateInfo& allocInfo,
15107 bool isUserDataString,
15111 VkDeviceMemory hMemory = VK_NULL_HANDLE;
15112 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
15115 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
15119 void* pMappedData = VMA_NULL;
// Persistent mapping requested (presumably when `map` parameter is set — signature elided).
15122 res = (*m_VulkanFunctions.vkMapMemory)(
15131 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
15132 FreeVulkanMemory(memTypeIndex, size, hMemory);
15137 *pAllocation = m_AllocationObjectAllocator.Allocate();
15138 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
15139 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
15140 (*pAllocation)->SetUserData(
this, pUserData);
15141 m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
15142 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
15144 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. Uses vkGetBufferMemoryRequirements2KHR
// with VkMemoryDedicatedRequirementsKHR chained when VK_KHR_dedicated_allocation or
// Vulkan >= 1.1 is active, so the requires/prefers dedicated flags are reported;
// otherwise falls back to plain vkGetBufferMemoryRequirements with both flags false.
15150 void VmaAllocator_T::GetBufferMemoryRequirements(
15152 VkMemoryRequirements& memReq,
15153 bool& requiresDedicatedAllocation,
15154 bool& prefersDedicatedAllocation)
const
15156 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
15157 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15159 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
15160 memReqInfo.buffer = hBuffer;
15162 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
15164 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
15165 memReq2.pNext = &memDedicatedReq;
15167 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
15169 memReq = memReq2.memoryRequirements;
15170 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
15171 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
15174 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
// Fallback path: core 1.0 query, dedicated-allocation hints unavailable.
15176 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
15177 requiresDedicatedAllocation =
false;
15178 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR chained
// when available, otherwise core vkGetImageMemoryRequirements with both dedicated
// flags reported false.
15182 void VmaAllocator_T::GetImageMemoryRequirements(
15184 VkMemoryRequirements& memReq,
15185 bool& requiresDedicatedAllocation,
15186 bool& prefersDedicatedAllocation)
const
15188 #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
15189 if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
15191 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
15192 memReqInfo.image = hImage;
15194 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
15196 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
15197 memReq2.pNext = &memDedicatedReq;
15199 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
15201 memReq = memReq2.memoryRequirements;
15202 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
15203 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
15206 #endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
// Fallback path: core 1.0 query, dedicated-allocation hints unavailable.
15208 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
15209 requiresDedicatedAllocation =
false;
15210 prefersDedicatedAllocation =
false;
// Top-level allocation entry point. Validates the request (rejects size 0 and
// contradictory create-flag combinations), routes custom-pool requests to the pool's
// block vector, and otherwise iterates candidate memory types from
// vkMemReq.memoryTypeBits, calling AllocateMemoryOfType and masking out each type
// that fails until one succeeds or all are exhausted.
// NOTE(review): extraction dropped interior lines; statements below are not contiguous.
15214 VkResult VmaAllocator_T::AllocateMemory(
15215 const VkMemoryRequirements& vkMemReq,
15216 bool requiresDedicatedAllocation,
15217 bool prefersDedicatedAllocation,
15218 VkBuffer dedicatedBuffer,
15219 VkImage dedicatedImage,
15221 VmaSuballocationType suballocType,
15222 size_t allocationCount,
// Zero the output array so it is well-defined on every early-return path.
15225 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
15227 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
15229 if(vkMemReq.size == 0)
15231 return VK_ERROR_VALIDATION_FAILED_EXT;
15236 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
15237 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15242 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
15243 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15245 if(requiresDedicatedAllocation)
15249 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
15250 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15252 if(createInfo.
pool != VK_NULL_HANDLE)
15254 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
15255 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15258 if((createInfo.
pool != VK_NULL_HANDLE) &&
15261 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
15262 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path: alignment is max of requirement and the type's min alignment.
15265 if(createInfo.
pool != VK_NULL_HANDLE)
15267 const VkDeviceSize alignmentForPool = VMA_MAX(
15268 vkMemReq.alignment,
15269 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
15274 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15279 return createInfo.
pool->m_BlockVector.Allocate(
15280 m_CurrentFrameIndex.load(),
// Default-pool path: iterate candidate memory types in order of suitability.
15291 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
15292 uint32_t memTypeIndex = UINT32_MAX;
15294 if(res == VK_SUCCESS)
15296 VkDeviceSize alignmentForMemType = VMA_MAX(
15297 vkMemReq.alignment,
15298 GetMemoryTypeMinAlignment(memTypeIndex));
15300 res = AllocateMemoryOfType(
15302 alignmentForMemType,
15303 requiresDedicatedAllocation || prefersDedicatedAllocation,
15312 if(res == VK_SUCCESS)
// This memory type failed — remove it from the candidate set and retry.
15322 memoryTypeBits &= ~(1u << memTypeIndex);
15325 if(res == VK_SUCCESS)
15327 alignmentForMemType = VMA_MAX(
15328 vkMemReq.alignment,
15329 GetMemoryTypeMinAlignment(memTypeIndex));
15331 res = AllocateMemoryOfType(
15333 alignmentForMemType,
15334 requiresDedicatedAllocation || prefersDedicatedAllocation,
15343 if(res == VK_SUCCESS)
15353 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a batch of allocations in reverse order. For each non-null allocation:
// touches it (lost-allocation bookkeeping), optionally fills it with the destroyed
// pattern, returns it to its owning block vector (custom pool or default) or frees
// the dedicated memory, updates the budget, and destroys the VmaAllocation_T object.
15364 void VmaAllocator_T::FreeMemory(
15365 size_t allocationCount,
15368 VMA_ASSERT(pAllocations);
15370 for(
size_t allocIndex = allocationCount; allocIndex--; )
15374 if(allocation != VK_NULL_HANDLE)
15376 if(TouchAllocation(allocation))
15378 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
15380 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
15383 switch(allocation->GetType())
15385 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15387 VmaBlockVector* pBlockVector = VMA_NULL;
15388 VmaPool hPool = allocation->GetBlock()->GetParentPool();
15389 if(hPool != VK_NULL_HANDLE)
15391 pBlockVector = &hPool->m_BlockVector;
// No parent pool: allocation came from the default block vector of its memory type.
15395 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15396 pBlockVector = m_pBlockVectors[memTypeIndex];
15398 pBlockVector->Free(allocation);
15401 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15402 FreeDedicatedMemory(allocation);
15410 m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
15411 allocation->SetUserData(
this, VMA_NULL);
15412 allocation->Dtor();
15413 m_AllocationObjectAllocator.Free(allocation);
// Deprecated-style resize: rejects size 0 and lost allocations; only a no-op resize
// (newSize == current size) succeeds — any actual size change returns
// VK_ERROR_OUT_OF_POOL_MEMORY.
15418 VkResult VmaAllocator_T::ResizeAllocation(
15420 VkDeviceSize newSize)
15423 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
15425 return VK_ERROR_VALIDATION_FAILED_EXT;
15427 if(newSize == alloc->GetSize())
15431 return VK_ERROR_OUT_OF_POOL_MEMORY;
// Aggregates statistics across the whole allocator into *pStats: initializes all
// stat infos, sums stats from default block vectors, custom pools, and dedicated
// allocations (per memory type / heap / total), then post-processes averages.
15434 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
15437 InitStatInfo(pStats->
total);
15438 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
15440 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
15444 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15446 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15447 VMA_ASSERT(pBlockVector);
15448 pBlockVector->AddStats(pStats);
// Custom pools (read lock over the pool list).
15453 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15454 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15456 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type.
15461 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15463 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
15464 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15465 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15466 VMA_ASSERT(pDedicatedAllocVector);
15467 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
15470 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
15471 VmaAddStatInfo(pStats->
total, allocationStatInfo);
15472 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
15473 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages etc. from the accumulated sums.
15478 VmaPostprocessCalcStatInfo(pStats->
total);
15479 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
15480 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
15481 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
15482 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// Fills outBudget[0..heapCount) for heaps starting at firstHeap.
// With VK_EXT_memory_budget active and a fresh fetch (< 30 operations since), reads
// cached driver values under a read lock, correcting usage by block bytes allocated
// since the fetch; a stale cache triggers UpdateVulkanBudget() and a retry.
// Without the extension, estimates: usage from tracked block bytes and budget as
// 80% of the heap size.
15485 void VmaAllocator_T::GetBudget(
VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
15487 #if VMA_MEMORY_BUDGET
15488 if(m_UseExtMemoryBudget)
15490 if(m_Budget.m_OperationsSinceBudgetFetch < 30)
15492 VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
15493 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
15495 const uint32_t heapIndex = firstHeap + i;
15497 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// Add bytes allocated by this allocator since the last driver fetch.
15500 if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->
blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
15502 outBudget->
usage = m_Budget.m_VulkanUsage[heapIndex] +
15503 outBudget->
blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
15507 outBudget->
usage = 0;
15511 outBudget->
budget = VMA_MIN(
15512 m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
// Cached budget is stale: refresh from the driver and retry.
15517 UpdateVulkanBudget();
15518 GetBudget(outBudget, firstHeap, heapCount);
// Extension not in use: estimate from internal counters.
15524 for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
15526 const uint32_t heapIndex = firstHeap + i;
15528 outBudget->
blockBytes = m_Budget.m_BlockBytes[heapIndex];
// 80% heuristic when the real budget is unknown.
15532 outBudget->
budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
// 4098 == 0x1002, AMD's PCI vendor ID as reported in VkPhysicalDeviceProperties::vendorID.
15537 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation pass: creates a VmaDefragmentationContext_T, registers the
// requested allocations, and runs Defragment(). If the operation completed
// synchronously (result other than VK_NOT_READY), the context is destroyed before
// returning; VK_NOT_READY means the caller must later call DefragmentationEnd.
15539 VkResult VmaAllocator_T::DefragmentationBegin(
15549 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15550 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15553 (*pContext)->AddAllocations(
15556 VkResult res = (*pContext)->Defragment(
15561 if(res != VK_NOT_READY)
15563 vma_delete(
this, *pContext);
15564 *pContext = VMA_NULL;
// Finishes a defragmentation pass started with DefragmentationBegin: destroys the context.
15570 VkResult VmaAllocator_T::DefragmentationEnd(
15573 vma_delete(
this, context);
// NOTE(review): function signature was lost in extraction — body fills *pAllocationInfo
// from hAllocation, so this is presumably VmaAllocator_T::GetAllocationInfo; verify
// against the full source.
// For allocations that can become lost, atomically bumps the last-use frame index
// (CAS loop) and reports zeroed device memory info when the allocation is lost;
// otherwise copies type, memory, offset, size, mapped pointer, and user data.
15579 if(hAllocation->CanBecomeLost())
15585 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15586 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15589 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Allocation is lost: report empty memory but preserve size and user data.
15593 pAllocationInfo->
offset = 0;
15594 pAllocationInfo->
size = hAllocation->GetSize();
15596 pAllocationInfo->
pUserData = hAllocation->GetUserData();
15599 else if(localLastUseFrameIndex == localCurrFrameIndex)
15601 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15602 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15603 pAllocationInfo->
offset = hAllocation->GetOffset();
15604 pAllocationInfo->
size = hAllocation->GetSize();
15606 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS loop: try to stamp the current frame as last use; retry on contention.
15611 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15613 localLastUseFrameIndex = localCurrFrameIndex;
15620 #if VMA_STATS_STRING_ENABLED
// Non-lost path (stats builds still update the last-use frame index).
15621 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15622 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15625 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15626 if(localLastUseFrameIndex == localCurrFrameIndex)
15632 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15634 localLastUseFrameIndex = localCurrFrameIndex;
15640 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15641 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15642 pAllocationInfo->
offset = hAllocation->GetOffset();
15643 pAllocationInfo->
size = hAllocation->GetSize();
15644 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15645 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. For allocations that can become
// lost: returns false if already lost, otherwise CAS-updates the last-use frame index.
// For ordinary allocations the frame index is only maintained in stats-enabled builds.
15649 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15652 if(hAllocation->CanBecomeLost())
15654 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15655 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15658 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15662 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: stamp current frame as last use; retry on contention.
15668 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15670 localLastUseFrameIndex = localCurrFrameIndex;
15677 #if VMA_STATS_STRING_ENABLED
15678 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15679 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15682 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15683 if(localLastUseFrameIndex == localCurrFrameIndex)
15689 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15691 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): function signature was lost in extraction — this is presumably
// VmaAllocator_T::CreatePool(pCreateInfo, pPool); verify against the full source.
// Creates a custom pool: validates the create info, computes the preferred block
// size, constructs the VmaPool_T, pre-creates minimum blocks, and registers the
// pool (with a fresh id) in the sorted m_Pools list under a write lock.
15703 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15713 return VK_ERROR_INITIALIZATION_FAILED;
15716 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15718 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15720 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15721 if(res != VK_SUCCESS)
// Minimum block creation failed: destroy the half-constructed pool.
15723 vma_delete(
this, *pPool);
15730 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15731 (*pPool)->SetId(m_NextPoolId++);
15732 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted m_Pools list under a write lock
// (asserting it was present), then deletes the pool object.
15738 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15742 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15743 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15744 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15747 vma_delete(
this, pool);
// NOTE(review): signature elided by extraction — body of GetPoolStats, delegating to
// the pool's block vector.
15752 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Stores the application's current frame index (used for lost-allocation tracking)
// and, when VK_EXT_memory_budget is active, refreshes the cached budget once per frame.
15755 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15757 m_CurrentFrameIndex.store(frameIndex);
15759 #if VMA_MEMORY_BUDGET
15760 if(m_UseExtMemoryBudget)
15762 UpdateVulkanBudget();
15764 #endif // #if VMA_MEMORY_BUDGET
// Marks eligible allocations in the given pool as lost as of the current frame,
// delegating to the pool's block vector; reports the count via pLostAllocationCount.
15767 void VmaAllocator_T::MakePoolAllocationsLost(
15769 size_t* pLostAllocationCount)
15771 hPool->m_BlockVector.MakePoolAllocationsLost(
15772 m_CurrentFrameIndex.load(),
15773 pLostAllocationCount);
// Validates corruption-detection margins for all blocks of one custom pool.
15776 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15778 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption across all default block vectors and custom pools whose memory
// type is in memoryTypeBits. Starts from VK_ERROR_FEATURE_NOT_PRESENT (margins not
// validatable anywhere) and upgrades to VK_SUCCESS when at least one vector supports
// and passes the check; other errors propagate.
15781 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15783 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors.
15786 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15788 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15790 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15791 VMA_ASSERT(pBlockVector);
15792 VkResult localRes = pBlockVector->CheckCorruption();
15795 case VK_ERROR_FEATURE_NOT_PRESENT:
15798 finalRes = VK_SUCCESS;
// Custom pools (read lock over the pool list).
15808 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15809 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15811 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15813 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15816 case VK_ERROR_FEATURE_NOT_PRESENT:
15819 finalRes = VK_SUCCESS;
// Creates a pre-lost allocation object (frame index VMA_FRAME_INDEX_LOST, no memory
// backing) — useful as a placeholder the application can treat uniformly with
// genuinely lost allocations.
15831 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15833 *pAllocation = m_AllocationObjectAllocator.Allocate();
15834 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15835 (*pAllocation)->InitLost();
15838 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15840 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15843 if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
15845 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
15846 VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
15849 const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
15850 if(blockBytesAfterAllocation > heapSize)
15852 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
15854 if(m_Budget.m_BlockBytes->compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
15862 m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
15866 VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15868 if(res == VK_SUCCESS)
15870 #if VMA_MEMORY_BUDGET
15871 ++m_Budget.m_OperationsSinceBudgetFetch;
15875 if(m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15877 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15882 m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
// Central wrapper around vkFreeMemory: invokes the user's pfnFree callback first,
// frees the device memory, then subtracts the size from the owning heap's tracked
// block bytes.
15888 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15891 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15893 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15897 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15899 m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
// Binds a buffer to device memory. When a pNext chain is supplied, requires
// vkBindBufferMemory2 (VK_KHR_bind_memory2 or Vulkan >= 1.1) — returns
// VK_ERROR_EXTENSION_NOT_PRESENT if unavailable. Plain binds use core
// vkBindBufferMemory.
15902 VkResult VmaAllocator_T::BindVulkanBuffer(
15903 VkDeviceMemory memory,
15904 VkDeviceSize memoryOffset,
15908 if(pNext != VMA_NULL)
15910 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
15911 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
15912 m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15914 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
15915 bindBufferMemoryInfo.pNext = pNext;
15916 bindBufferMemoryInfo.buffer = buffer;
15917 bindBufferMemoryInfo.memory = memory;
15918 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15919 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15922 #endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
// pNext chain requested but bind_memory2 unavailable.
15924 return VK_ERROR_EXTENSION_NOT_PRESENT;
15929 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
// Image counterpart of BindVulkanBuffer: pNext chains require vkBindImageMemory2
// (VK_KHR_bind_memory2 or Vulkan >= 1.1), otherwise VK_ERROR_EXTENSION_NOT_PRESENT;
// plain binds use core vkBindImageMemory.
15933 VkResult VmaAllocator_T::BindVulkanImage(
15934 VkDeviceMemory memory,
15935 VkDeviceSize memoryOffset,
15939 if(pNext != VMA_NULL)
15941 #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
15942 if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
15943 m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15945 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
15946 bindBufferMemoryInfo.pNext = pNext;
15947 bindBufferMemoryInfo.image = image;
15948 bindBufferMemoryInfo.memory = memory;
15949 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15950 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15953 #endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
// pNext chain requested but bind_memory2 unavailable.
15955 return VK_ERROR_EXTENSION_NOT_PRESENT;
15960 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
// Maps an allocation's memory for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole owning block (ref-counted) and offset the
// returned pointer; dedicated allocations map their own VkDeviceMemory.
15964 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15966 if(hAllocation->CanBecomeLost())
15968 return VK_ERROR_MEMORY_MAP_FAILED;
15971 switch(hAllocation->GetType())
15973 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15975 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15976 char *pBytes = VMA_NULL;
15977 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15978 if(res == VK_SUCCESS)
// Returned pointer is block base plus this allocation's offset within the block.
15980 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15981 hAllocation->BlockAllocMap();
15985 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15986 return hAllocation->DedicatedAllocMap(
this, ppData);
15989 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): function signature was lost in extraction — this is presumably
// VmaAllocator_T::Unmap(hAllocation), the inverse of Map above; verify against the
// full source. Decrements the map reference for block allocations and unmaps
// dedicated allocations.
15995 switch(hAllocation->GetType())
15997 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15999 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
16000 hAllocation->BlockAllocUnmap();
16001 pBlock->Unmap(
this, 1);
16004 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16005 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the memory backing an allocation. Dedicated allocations bind
// directly via BindVulkanBuffer; block allocations delegate to the owning block,
// which serializes binds and applies the block-relative offset.
16012 VkResult VmaAllocator_T::BindBufferMemory(
16014 VkDeviceSize allocationLocalOffset,
16018 VkResult res = VK_SUCCESS;
16019 switch(hAllocation->GetType())
16021 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16022 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
16024 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16026 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
16027 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
16028 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: dedicated allocations bind via
// BindVulkanImage; block allocations delegate to the owning block.
16037 VkResult VmaAllocator_T::BindImageMemory(
16039 VkDeviceSize allocationLocalOffset,
16043 VkResult res = VK_SUCCESS;
16044 switch(hAllocation->GetType())
16046 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16047 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
16049 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
16051 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
16052 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
16053 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// Flushes or invalidates a range of an allocation on non-coherent memory types.
// Builds a VkMappedMemoryRange aligned to nonCoherentAtomSize (offset rounded down,
// size rounded up, clamped to allocation/block bounds) and issues
// vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges depending on `op`.
// No-op for coherent memory or size == 0.
16062 void VmaAllocator_T::FlushOrInvalidateAllocation(
16064 VkDeviceSize offset, VkDeviceSize size,
16065 VMA_CACHE_OPERATION op)
16067 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
16068 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
16070 const VkDeviceSize allocationSize = hAllocation->GetSize();
16071 VMA_ASSERT(offset <= allocationSize);
16073 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
16075 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
16076 memRange.memory = hAllocation->GetMemory();
16078 switch(hAllocation->GetType())
16080 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
16081 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
16082 if(size == VK_WHOLE_SIZE)
16084 memRange.size = allocationSize - memRange.offset;
16088 VMA_ASSERT(offset + size <= allocationSize);
16089 memRange.size = VMA_MIN(
16090 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
16091 allocationSize - memRange.offset);
16095 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// 1. Offset/size relative to the allocation, aligned to the atom size.
16098 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
16099 if(size == VK_WHOLE_SIZE)
16101 size = allocationSize - offset;
16105 VMA_ASSERT(offset + size <= allocationSize);
16107 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// 2. Rebase onto the owning block and clamp to the block's end.
16110 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
16111 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
16112 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
16113 memRange.offset += allocationOffset;
16114 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
16125 case VMA_CACHE_FLUSH:
16126 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
16128 case VMA_CACHE_INVALIDATE:
16129 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees one dedicated allocation: removes it from the per-type sorted registry under
// a write lock (asserting it was registered), then releases its VkDeviceMemory via
// FreeVulkanMemory.
16138 void VmaAllocator_T::FreeDedicatedMemory(
const VmaAllocation allocation)
16140 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
16142 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
16144 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16145 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
16146 VMA_ASSERT(pDedicatedAllocations);
16147 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
16148 VMA_ASSERT(success);
16151 VkDeviceMemory hMemory = allocation->GetMemory();
16163 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
16165 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Determines which memory types can back the staging buffer used for GPU
// defragmentation: creates a dummy buffer with the canonical create-info, queries its
// memoryTypeBits, and destroys it. Returns 0 if the dummy buffer cannot be created.
16168 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const
16170 VkBufferCreateInfo dummyBufCreateInfo;
16171 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
16173 uint32_t memoryTypeBits = 0;
16176 VkBuffer buf = VK_NULL_HANDLE;
16177 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
16178 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
16179 if(res == VK_SUCCESS)
16182 VkMemoryRequirements memReq;
16183 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
16184 memoryTypeBits = memReq.memoryTypeBits;
16187 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
16190 return memoryTypeBits;
16193 #if VMA_MEMORY_BUDGET
// Fetches fresh per-heap usage/budget from the driver via
// vkGetPhysicalDeviceMemoryProperties2 + VkPhysicalDeviceMemoryBudgetPropertiesEXT,
// stores it in m_Budget under a write lock, snapshots current block bytes, and
// resets the staleness counter. Requires VK_EXT_memory_budget to be active.
16195 void VmaAllocator_T::UpdateVulkanBudget()
16197 VMA_ASSERT(m_UseExtMemoryBudget);
16199 VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };
16201 VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
16202 memProps.pNext = &budgetProps;
16204 GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);
16207 VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);
16209 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
16211 m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
16212 m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
16213 m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
16215 m_Budget.m_OperationsSinceBudgetFetch = 0;
16219 #endif // #if VMA_MEMORY_BUDGET
// Debug helper: fills the allocation's memory with `pattern` (created/destroyed
// marker) when VMA_DEBUG_INITIALIZE_ALLOCATIONS is on. Only possible for
// HOST_VISIBLE, non-lost-capable allocations: maps, memsets, flushes, unmaps.
// Asserts if mapping fails while the feature is enabled.
16221 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
16223 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
16224 !hAllocation->CanBecomeLost() &&
16225 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
16227 void* pData = VMA_NULL;
16228 VkResult res = Map(hAllocation, &pData);
16229 if(res == VK_SUCCESS)
16231 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
16232 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
16233 Unmap(hAllocation);
16237 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Lazily computed, cached accessor for CalculateGpuDefragmentationMemoryTypeBits.
// UINT32_MAX is the "not yet computed" sentinel. A benign race may compute the value
// twice; the result is identical either way.
16242 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
16244 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
16245 if(memoryTypeBits == UINT32_MAX)
16247 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
16248 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
16250 return memoryTypeBits;
16253 #if VMA_STATS_STRING_ENABLED
// Writes a detailed JSON dump of the allocator's state: "DedicatedAllocations"
// (per memory type), "DefaultPools" (non-empty default block vectors), and "Pools"
// (custom pools keyed by pool id). Section headers are emitted lazily, only when the
// first non-empty entry is found.
16255 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
16257 bool dedicatedAllocationsStarted =
false;
16258 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16260 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
16261 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
16262 VMA_ASSERT(pDedicatedAllocVector);
16263 if(pDedicatedAllocVector->empty() ==
false)
16265 if(dedicatedAllocationsStarted ==
false)
16267 dedicatedAllocationsStarted =
true;
16268 json.WriteString(
"DedicatedAllocations");
16269 json.BeginObject();
16272 json.BeginString(
"Type ");
16273 json.ContinueString(memTypeIndex);
16278 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
16280 json.BeginObject(
true);
16282 hAlloc->PrintParameters(json);
16289 if(dedicatedAllocationsStarted)
// Default block vectors, one JSON key per non-empty memory type.
16295 bool allocationsStarted =
false;
16296 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16298 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
16300 if(allocationsStarted ==
false)
16302 allocationsStarted =
true;
16303 json.WriteString(
"DefaultPools");
16304 json.BeginObject();
16307 json.BeginString(
"Type ");
16308 json.ContinueString(memTypeIndex);
16311 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
16314 if(allocationsStarted)
// Custom pools, keyed by pool id (read lock over the pool list).
16322 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
16323 const size_t poolCount = m_Pools.size();
16326 json.WriteString(
"Pools");
16327 json.BeginObject();
16328 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
16330 json.BeginString();
16331 json.ContinueString(m_Pools[poolIndex]->GetId());
16334 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
16341 #endif // #if VMA_STATS_STRING_ENABLED
16350 VMA_ASSERT(pCreateInfo && pAllocator);
16353 VMA_DEBUG_LOG(
"vmaCreateAllocator");
16355 return (*pAllocator)->Init(pCreateInfo);
16361 if(allocator != VK_NULL_HANDLE)
16363 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
16364 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
16365 vma_delete(&allocationCallbacks, allocator);
16371 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
16373 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
16374 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
16379 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
16381 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
16382 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
16387 uint32_t memoryTypeIndex,
16388 VkMemoryPropertyFlags* pFlags)
16390 VMA_ASSERT(allocator && pFlags);
16391 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
16392 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
16397 uint32_t frameIndex)
16399 VMA_ASSERT(allocator);
16400 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
16402 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16404 allocator->SetCurrentFrameIndex(frameIndex);
16411 VMA_ASSERT(allocator && pStats);
16412 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16413 allocator->CalculateStats(pStats);
16420 VMA_ASSERT(allocator && pBudget);
16421 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16422 allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
16425 #if VMA_STATS_STRING_ENABLED
16429 char** ppStatsString,
16430 VkBool32 detailedMap)
16432 VMA_ASSERT(allocator && ppStatsString);
16433 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16435 VmaStringBuilder sb(allocator);
16437 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
16438 json.BeginObject();
16441 allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());
16444 allocator->CalculateStats(&stats);
16446 json.WriteString(
"Total");
16447 VmaPrintStatInfo(json, stats.
total);
16449 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
16451 json.BeginString(
"Heap ");
16452 json.ContinueString(heapIndex);
16454 json.BeginObject();
16456 json.WriteString(
"Size");
16457 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
16459 json.WriteString(
"Flags");
16460 json.BeginArray(
true);
16461 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
16463 json.WriteString(
"DEVICE_LOCAL");
16467 json.WriteString(
"Budget");
16468 json.BeginObject();
16470 json.WriteString(
"BlockBytes");
16471 json.WriteNumber(budget[heapIndex].blockBytes);
16472 json.WriteString(
"AllocationBytes");
16473 json.WriteNumber(budget[heapIndex].allocationBytes);
16474 json.WriteString(
"Usage");
16475 json.WriteNumber(budget[heapIndex].usage);
16476 json.WriteString(
"Budget");
16477 json.WriteNumber(budget[heapIndex].budget);
16483 json.WriteString(
"Stats");
16484 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
16487 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
16489 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
16491 json.BeginString(
"Type ");
16492 json.ContinueString(typeIndex);
16495 json.BeginObject();
16497 json.WriteString(
"Flags");
16498 json.BeginArray(
true);
16499 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
16500 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
16502 json.WriteString(
"DEVICE_LOCAL");
16504 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
16506 json.WriteString(
"HOST_VISIBLE");
16508 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
16510 json.WriteString(
"HOST_COHERENT");
16512 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
16514 json.WriteString(
"HOST_CACHED");
16516 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
16518 json.WriteString(
"LAZILY_ALLOCATED");
16524 json.WriteString(
"Stats");
16525 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
16534 if(detailedMap == VK_TRUE)
16536 allocator->PrintDetailedMap(json);
16542 const size_t len = sb.GetLength();
16543 char*
const pChars = vma_new_array(allocator,
char, len + 1);
16546 memcpy(pChars, sb.GetData(), len);
16548 pChars[len] =
'\0';
16549 *ppStatsString = pChars;
16554 char* pStatsString)
16556 if(pStatsString != VMA_NULL)
16558 VMA_ASSERT(allocator);
16559 size_t len = strlen(pStatsString);
16560 vma_delete_array(allocator, pStatsString, len + 1);
16564 #endif // #if VMA_STATS_STRING_ENABLED
16571 uint32_t memoryTypeBits,
16573 uint32_t* pMemoryTypeIndex)
16575 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16576 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16577 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16584 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
16585 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
16586 uint32_t notPreferredFlags = 0;
16589 switch(pAllocationCreateInfo->
usage)
16594 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16596 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
16600 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
16603 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
16604 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
16606 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
16610 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
16611 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
16614 notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
16617 requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
16624 *pMemoryTypeIndex = UINT32_MAX;
16625 uint32_t minCost = UINT32_MAX;
16626 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
16627 memTypeIndex < allocator->GetMemoryTypeCount();
16628 ++memTypeIndex, memTypeBit <<= 1)
16631 if((memTypeBit & memoryTypeBits) != 0)
16633 const VkMemoryPropertyFlags currFlags =
16634 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16636 if((requiredFlags & ~currFlags) == 0)
16639 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
16640 VmaCountBitsSet(currFlags & notPreferredFlags);
16642 if(currCost < minCost)
16644 *pMemoryTypeIndex = memTypeIndex;
16649 minCost = currCost;
16654 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16659 const VkBufferCreateInfo* pBufferCreateInfo,
16661 uint32_t* pMemoryTypeIndex)
16663 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16664 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16665 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16666 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16668 const VkDevice hDev = allocator->m_hDevice;
16669 VkBuffer hBuffer = VK_NULL_HANDLE;
16670 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16671 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16672 if(res == VK_SUCCESS)
16674 VkMemoryRequirements memReq = {};
16675 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16676 hDev, hBuffer, &memReq);
16680 memReq.memoryTypeBits,
16681 pAllocationCreateInfo,
16684 allocator->GetVulkanFunctions().vkDestroyBuffer(
16685 hDev, hBuffer, allocator->GetAllocationCallbacks());
16692 const VkImageCreateInfo* pImageCreateInfo,
16694 uint32_t* pMemoryTypeIndex)
16696 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16697 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16698 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16699 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16701 const VkDevice hDev = allocator->m_hDevice;
16702 VkImage hImage = VK_NULL_HANDLE;
16703 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16704 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16705 if(res == VK_SUCCESS)
16707 VkMemoryRequirements memReq = {};
16708 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16709 hDev, hImage, &memReq);
16713 memReq.memoryTypeBits,
16714 pAllocationCreateInfo,
16717 allocator->GetVulkanFunctions().vkDestroyImage(
16718 hDev, hImage, allocator->GetAllocationCallbacks());
16728 VMA_ASSERT(allocator && pCreateInfo && pPool);
16730 VMA_DEBUG_LOG(
"vmaCreatePool");
16732 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16734 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16736 #if VMA_RECORDING_ENABLED
16737 if(allocator->GetRecorder() != VMA_NULL)
16739 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16750 VMA_ASSERT(allocator);
16752 if(pool == VK_NULL_HANDLE)
16757 VMA_DEBUG_LOG(
"vmaDestroyPool");
16759 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16761 #if VMA_RECORDING_ENABLED
16762 if(allocator->GetRecorder() != VMA_NULL)
16764 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16768 allocator->DestroyPool(pool);
16776 VMA_ASSERT(allocator && pool && pPoolStats);
16778 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16780 allocator->GetPoolStats(pool, pPoolStats);
16786 size_t* pLostAllocationCount)
16788 VMA_ASSERT(allocator && pool);
16790 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16792 #if VMA_RECORDING_ENABLED
16793 if(allocator->GetRecorder() != VMA_NULL)
16795 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16799 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16804 VMA_ASSERT(allocator && pool);
16806 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16808 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16810 return allocator->CheckPoolCorruption(pool);
16816 const char** ppName)
16818 VMA_ASSERT(allocator && pool);
16820 VMA_DEBUG_LOG(
"vmaGetPoolName");
16822 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16824 *ppName = pool->GetName();
16832 VMA_ASSERT(allocator && pool);
16834 VMA_DEBUG_LOG(
"vmaSetPoolName");
16836 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16838 pool->SetName(pName);
16840 #if VMA_RECORDING_ENABLED
16841 if(allocator->GetRecorder() != VMA_NULL)
16843 allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
16850 const VkMemoryRequirements* pVkMemoryRequirements,
16855 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16857 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16859 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16861 VkResult result = allocator->AllocateMemory(
16862 *pVkMemoryRequirements,
16868 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16872 #if VMA_RECORDING_ENABLED
16873 if(allocator->GetRecorder() != VMA_NULL)
16875 allocator->GetRecorder()->RecordAllocateMemory(
16876 allocator->GetCurrentFrameIndex(),
16877 *pVkMemoryRequirements,
16883 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16885 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16893 const VkMemoryRequirements* pVkMemoryRequirements,
16895 size_t allocationCount,
16899 if(allocationCount == 0)
16904 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16906 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16908 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16910 VkResult result = allocator->AllocateMemory(
16911 *pVkMemoryRequirements,
16917 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16921 #if VMA_RECORDING_ENABLED
16922 if(allocator->GetRecorder() != VMA_NULL)
16924 allocator->GetRecorder()->RecordAllocateMemoryPages(
16925 allocator->GetCurrentFrameIndex(),
16926 *pVkMemoryRequirements,
16928 (uint64_t)allocationCount,
16933 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16935 for(
size_t i = 0; i < allocationCount; ++i)
16937 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16951 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16953 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16955 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16957 VkMemoryRequirements vkMemReq = {};
16958 bool requiresDedicatedAllocation =
false;
16959 bool prefersDedicatedAllocation =
false;
16960 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16961 requiresDedicatedAllocation,
16962 prefersDedicatedAllocation);
16964 VkResult result = allocator->AllocateMemory(
16966 requiresDedicatedAllocation,
16967 prefersDedicatedAllocation,
16971 VMA_SUBALLOCATION_TYPE_BUFFER,
16975 #if VMA_RECORDING_ENABLED
16976 if(allocator->GetRecorder() != VMA_NULL)
16978 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16979 allocator->GetCurrentFrameIndex(),
16981 requiresDedicatedAllocation,
16982 prefersDedicatedAllocation,
16988 if(pAllocationInfo && result == VK_SUCCESS)
16990 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17003 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
17005 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
17007 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17009 VkMemoryRequirements vkMemReq = {};
17010 bool requiresDedicatedAllocation =
false;
17011 bool prefersDedicatedAllocation =
false;
17012 allocator->GetImageMemoryRequirements(image, vkMemReq,
17013 requiresDedicatedAllocation, prefersDedicatedAllocation);
17015 VkResult result = allocator->AllocateMemory(
17017 requiresDedicatedAllocation,
17018 prefersDedicatedAllocation,
17022 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
17026 #if VMA_RECORDING_ENABLED
17027 if(allocator->GetRecorder() != VMA_NULL)
17029 allocator->GetRecorder()->RecordAllocateMemoryForImage(
17030 allocator->GetCurrentFrameIndex(),
17032 requiresDedicatedAllocation,
17033 prefersDedicatedAllocation,
17039 if(pAllocationInfo && result == VK_SUCCESS)
17041 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17051 VMA_ASSERT(allocator);
17053 if(allocation == VK_NULL_HANDLE)
17058 VMA_DEBUG_LOG(
"vmaFreeMemory");
17060 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17062 #if VMA_RECORDING_ENABLED
17063 if(allocator->GetRecorder() != VMA_NULL)
17065 allocator->GetRecorder()->RecordFreeMemory(
17066 allocator->GetCurrentFrameIndex(),
17071 allocator->FreeMemory(
17078 size_t allocationCount,
17081 if(allocationCount == 0)
17086 VMA_ASSERT(allocator);
17088 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
17090 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17092 #if VMA_RECORDING_ENABLED
17093 if(allocator->GetRecorder() != VMA_NULL)
17095 allocator->GetRecorder()->RecordFreeMemoryPages(
17096 allocator->GetCurrentFrameIndex(),
17097 (uint64_t)allocationCount,
17102 allocator->FreeMemory(allocationCount, pAllocations);
17108 VkDeviceSize newSize)
17110 VMA_ASSERT(allocator && allocation);
17112 VMA_DEBUG_LOG(
"vmaResizeAllocation");
17114 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17116 return allocator->ResizeAllocation(allocation, newSize);
17124 VMA_ASSERT(allocator && allocation && pAllocationInfo);
17126 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17128 #if VMA_RECORDING_ENABLED
17129 if(allocator->GetRecorder() != VMA_NULL)
17131 allocator->GetRecorder()->RecordGetAllocationInfo(
17132 allocator->GetCurrentFrameIndex(),
17137 allocator->GetAllocationInfo(allocation, pAllocationInfo);
17144 VMA_ASSERT(allocator && allocation);
17146 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17148 #if VMA_RECORDING_ENABLED
17149 if(allocator->GetRecorder() != VMA_NULL)
17151 allocator->GetRecorder()->RecordTouchAllocation(
17152 allocator->GetCurrentFrameIndex(),
17157 return allocator->TouchAllocation(allocation);
17165 VMA_ASSERT(allocator && allocation);
17167 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17169 allocation->SetUserData(allocator, pUserData);
17171 #if VMA_RECORDING_ENABLED
17172 if(allocator->GetRecorder() != VMA_NULL)
17174 allocator->GetRecorder()->RecordSetAllocationUserData(
17175 allocator->GetCurrentFrameIndex(),
17186 VMA_ASSERT(allocator && pAllocation);
17188 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
17190 allocator->CreateLostAllocation(pAllocation);
17192 #if VMA_RECORDING_ENABLED
17193 if(allocator->GetRecorder() != VMA_NULL)
17195 allocator->GetRecorder()->RecordCreateLostAllocation(
17196 allocator->GetCurrentFrameIndex(),
17207 VMA_ASSERT(allocator && allocation && ppData);
17209 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17211 VkResult res = allocator->Map(allocation, ppData);
17213 #if VMA_RECORDING_ENABLED
17214 if(allocator->GetRecorder() != VMA_NULL)
17216 allocator->GetRecorder()->RecordMapMemory(
17217 allocator->GetCurrentFrameIndex(),
17229 VMA_ASSERT(allocator && allocation);
17231 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17233 #if VMA_RECORDING_ENABLED
17234 if(allocator->GetRecorder() != VMA_NULL)
17236 allocator->GetRecorder()->RecordUnmapMemory(
17237 allocator->GetCurrentFrameIndex(),
17242 allocator->Unmap(allocation);
17247 VMA_ASSERT(allocator && allocation);
17249 VMA_DEBUG_LOG(
"vmaFlushAllocation");
17251 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17253 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
17255 #if VMA_RECORDING_ENABLED
17256 if(allocator->GetRecorder() != VMA_NULL)
17258 allocator->GetRecorder()->RecordFlushAllocation(
17259 allocator->GetCurrentFrameIndex(),
17260 allocation, offset, size);
17267 VMA_ASSERT(allocator && allocation);
17269 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
17271 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17273 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
17275 #if VMA_RECORDING_ENABLED
17276 if(allocator->GetRecorder() != VMA_NULL)
17278 allocator->GetRecorder()->RecordInvalidateAllocation(
17279 allocator->GetCurrentFrameIndex(),
17280 allocation, offset, size);
17287 VMA_ASSERT(allocator);
17289 VMA_DEBUG_LOG(
"vmaCheckCorruption");
17291 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17293 return allocator->CheckCorruption(memoryTypeBits);
17299 size_t allocationCount,
17300 VkBool32* pAllocationsChanged,
17310 if(pDefragmentationInfo != VMA_NULL)
17324 if(res == VK_NOT_READY)
17337 VMA_ASSERT(allocator && pInfo && pContext);
17348 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
17350 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
17352 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17354 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
17356 #if VMA_RECORDING_ENABLED
17357 if(allocator->GetRecorder() != VMA_NULL)
17359 allocator->GetRecorder()->RecordDefragmentationBegin(
17360 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
17371 VMA_ASSERT(allocator);
17373 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
17375 if(context != VK_NULL_HANDLE)
17377 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17379 #if VMA_RECORDING_ENABLED
17380 if(allocator->GetRecorder() != VMA_NULL)
17382 allocator->GetRecorder()->RecordDefragmentationEnd(
17383 allocator->GetCurrentFrameIndex(), context);
17387 return allocator->DefragmentationEnd(context);
17400 VMA_ASSERT(allocator && allocation && buffer);
17402 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
17404 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17406 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
17412 VkDeviceSize allocationLocalOffset,
17416 VMA_ASSERT(allocator && allocation && buffer);
17418 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
17420 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17422 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
17430 VMA_ASSERT(allocator && allocation && image);
17432 VMA_DEBUG_LOG(
"vmaBindImageMemory");
17434 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17436 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
17442 VkDeviceSize allocationLocalOffset,
17446 VMA_ASSERT(allocator && allocation && image);
17448 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
17450 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17452 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
17457 const VkBufferCreateInfo* pBufferCreateInfo,
17463 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
17465 if(pBufferCreateInfo->size == 0)
17467 return VK_ERROR_VALIDATION_FAILED_EXT;
17470 VMA_DEBUG_LOG(
"vmaCreateBuffer");
17472 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17474 *pBuffer = VK_NULL_HANDLE;
17475 *pAllocation = VK_NULL_HANDLE;
17478 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
17479 allocator->m_hDevice,
17481 allocator->GetAllocationCallbacks(),
17486 VkMemoryRequirements vkMemReq = {};
17487 bool requiresDedicatedAllocation =
false;
17488 bool prefersDedicatedAllocation =
false;
17489 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
17490 requiresDedicatedAllocation, prefersDedicatedAllocation);
17494 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
17496 VMA_ASSERT(vkMemReq.alignment %
17497 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
17499 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
17501 VMA_ASSERT(vkMemReq.alignment %
17502 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
17504 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
17506 VMA_ASSERT(vkMemReq.alignment %
17507 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
17511 res = allocator->AllocateMemory(
17513 requiresDedicatedAllocation,
17514 prefersDedicatedAllocation,
17517 *pAllocationCreateInfo,
17518 VMA_SUBALLOCATION_TYPE_BUFFER,
17522 #if VMA_RECORDING_ENABLED
17523 if(allocator->GetRecorder() != VMA_NULL)
17525 allocator->GetRecorder()->RecordCreateBuffer(
17526 allocator->GetCurrentFrameIndex(),
17527 *pBufferCreateInfo,
17528 *pAllocationCreateInfo,
17538 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
17543 #if VMA_STATS_STRING_ENABLED
17544 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
17546 if(pAllocationInfo != VMA_NULL)
17548 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17553 allocator->FreeMemory(
17556 *pAllocation = VK_NULL_HANDLE;
17557 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17558 *pBuffer = VK_NULL_HANDLE;
17561 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
17562 *pBuffer = VK_NULL_HANDLE;
17573 VMA_ASSERT(allocator);
17575 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17580 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
17582 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17584 #if VMA_RECORDING_ENABLED
17585 if(allocator->GetRecorder() != VMA_NULL)
17587 allocator->GetRecorder()->RecordDestroyBuffer(
17588 allocator->GetCurrentFrameIndex(),
17593 if(buffer != VK_NULL_HANDLE)
17595 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
17598 if(allocation != VK_NULL_HANDLE)
17600 allocator->FreeMemory(
17608 const VkImageCreateInfo* pImageCreateInfo,
17614 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
17616 if(pImageCreateInfo->extent.width == 0 ||
17617 pImageCreateInfo->extent.height == 0 ||
17618 pImageCreateInfo->extent.depth == 0 ||
17619 pImageCreateInfo->mipLevels == 0 ||
17620 pImageCreateInfo->arrayLayers == 0)
17622 return VK_ERROR_VALIDATION_FAILED_EXT;
17625 VMA_DEBUG_LOG(
"vmaCreateImage");
17627 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17629 *pImage = VK_NULL_HANDLE;
17630 *pAllocation = VK_NULL_HANDLE;
17633 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
17634 allocator->m_hDevice,
17636 allocator->GetAllocationCallbacks(),
17640 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
17641 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
17642 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
17645 VkMemoryRequirements vkMemReq = {};
17646 bool requiresDedicatedAllocation =
false;
17647 bool prefersDedicatedAllocation =
false;
17648 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
17649 requiresDedicatedAllocation, prefersDedicatedAllocation);
17651 res = allocator->AllocateMemory(
17653 requiresDedicatedAllocation,
17654 prefersDedicatedAllocation,
17657 *pAllocationCreateInfo,
17662 #if VMA_RECORDING_ENABLED
17663 if(allocator->GetRecorder() != VMA_NULL)
17665 allocator->GetRecorder()->RecordCreateImage(
17666 allocator->GetCurrentFrameIndex(),
17668 *pAllocationCreateInfo,
17678 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
17683 #if VMA_STATS_STRING_ENABLED
17684 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
17686 if(pAllocationInfo != VMA_NULL)
17688 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17693 allocator->FreeMemory(
17696 *pAllocation = VK_NULL_HANDLE;
17697 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17698 *pImage = VK_NULL_HANDLE;
17701 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17702 *pImage = VK_NULL_HANDLE;
17713 VMA_ASSERT(allocator);
17715 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17720 VMA_DEBUG_LOG(
"vmaDestroyImage");
17722 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17724 #if VMA_RECORDING_ENABLED
17725 if(allocator->GetRecorder() != VMA_NULL)
17727 allocator->GetRecorder()->RecordDestroyImage(
17728 allocator->GetCurrentFrameIndex(),
17733 if(image != VK_NULL_HANDLE)
17735 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17737 if(allocation != VK_NULL_HANDLE)
17739 allocator->FreeMemory(
17745 #endif // #ifdef VMA_IMPLEMENTATION