#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// e.g. 1001000 = Vulkan 1.1. By default it is detected from the included Vulkan headers.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif

#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif
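// Illustrative configuration sketch (not part of the original header): a user's
// translation unit can pin these macros before including this file, for example
// to force Vulkan 1.0 code paths even when newer Vulkan headers are installed:
//
//     #define VMA_VULKAN_VERSION 1000000 // Vulkan 1.0
//     #define VMA_DEDICATED_ALLOCATION 0
//     #include "vk_mem_alloc.h"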
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
/// PhysicalDeviceProperties are fetched from physicalDevice by the allocator.
/// You can access it here, without fetching it again on your own.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

/// PhysicalDeviceMemoryProperties are fetched from physicalDevice by the allocator.
/// You can access it here, without fetching it again on your own.
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given a memory type index, returns the property flags of this memory type.
/// This is just a convenience function; the same information can be obtained
/// using vmaGetMemoryProperties().
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
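/* Illustrative usage sketch (not part of the original header; assumes a valid
VmaAllocator handle named `allocator`): checking whether a memory type is
HOST_VISIBLE, e.g. before deciding to map it.

    VkMemoryPropertyFlags memFlags = 0;
    vmaGetMemoryTypeProperties(allocator, memTypeIndex, &memFlags);
    if((memFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        // Memory of this type can be mapped.
    }
*/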
/// Define this macro to 0 to disable functions vmaBuildStatsString and vmaFreeStatsString.
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
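/* Illustrative usage sketch (not part of the original header): dumping the whole
allocator state as JSON and releasing the string afterwards. Assumes a valid
VmaAllocator; the printf sink is just an example.

    char* statsString = NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/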
/// Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/// Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

/// Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
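/* Illustrative usage sketch (not part of the original header): choosing a memory
type for a uniform buffer. VmaAllocationCreateInfo and VMA_MEMORY_USAGE_GPU_ONLY
are declared earlier in the full header (elided from this excerpt).

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    uint32_t memTypeIndex = 0;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/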
/// Marks all allocations in given pool as lost if they are not used in current
/// frame or VmaPoolCreateInfo::frameInUseCount frames back from now.
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

/// Retrieves name of a custom pool.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
/// General purpose memory allocation.
/// You should free the memory using vmaFreeMemory() or vmaFreeMemoryPages().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// General purpose memory allocation for multiple allocation objects at once.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

/// Frees memory and destroys multiple allocations.
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

/// \deprecated In version 2.2.0 it used to try to change allocation's size without
/// moving or reallocating it. In the current version it returns VK_SUCCESS only if
/// newSize equals the current allocation's size.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
/// \deprecated This is a part of the old interface. It is recommended to use
/// structure VmaDefragmentationInfo2 and function vmaDefragmentationBegin() instead.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

/// Binds buffer to allocation with additional parameters.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

/// Binds image to allocation with additional parameters.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

/// Function similar to vmaCreateBuffer().
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
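/* Illustrative usage sketch (not part of the original header): creating a
host-visible staging buffer in one call. VMA_MEMORY_USAGE_CPU_ONLY is declared
earlier in the full header (elided from this excerpt).

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, NULL);
    // ... use the buffer, then:
    // vmaDestroyBuffer(allocator, buffer, allocation);
*/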
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>

/*
Define this macro to 1 to make the library fetch pointers to Vulkan functions
internally, like: vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
*/
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
// Define VMA_USE_STL_CONTAINERS to 1 to make the library use STL containers
// (std::vector, std::unordered_map, std::list) instead of its own implementations.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio 2015 Update 2 and later supports std::shared_mutex even though
    // it still reports __cplusplus as 199711L; check _MSVC_LANG instead.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif

#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>
#include <algorithm>
#include <mutex>
#define VMA_NULL nullptr

#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <malloc.h> // for memalign()

void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>

void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr) assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make the program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Converts a number to a decimal string. Only used when statistics are enabled.
#if VMA_STATS_STRING_ENABLED
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1 together with non-zero VMA_DEBUG_MARGIN to enable
    // writing and validating a magic value in the margin before and after every
    // allocation, so that out-of-bounds writes are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable a single mutex protecting
    // all entry calls to the library. Useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
        className(const className&) = delete; \
        className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian bytes 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copies of values from VK_AMD_device_coherent_memory, so the library compiles
// even against older Vulkan headers that don't define them.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiple of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiple of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if given number is a power of two.
// T must be an unsigned integer number, or a signed integer that is always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
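// Worked examples (illustrative, not from the original source):
// VmaAlignUp<uint32_t>(13, 8) == 16, VmaAlignDown<uint32_t>(13, 8) == 8,
// VmaRoundDiv<uint32_t>(7, 2) == 4 (rounds to nearest),
// VmaIsPow2(64) == true, VmaIsPow2(48) == false.
// Alignments passed to these helpers are assumed to be powers of two, which
// holds for all alignment values reported by Vulkan.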
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--; v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--; v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32; v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    return v ^ (v >> 1);
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    return v ^ (v >> 1);
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy the same VkDeviceMemory page.
ResourceA must be at a lower memory offset than ResourceB.

Algorithm is based on the Vulkan specification, chapter "Resource Memory
Association", paragraph "Buffer-Image Granularity".
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
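// Worked example (illustrative): with pageSize = 65536 (a common
// bufferImageGranularity), resource A at offset 0 with size 100 ends on page 0
// (99 & ~65535 == 0), while resource B at offset 65536 starts on page 1
// (65536 & ~65535 == 65536), so the function returns false - no granularity
// conflict is possible between them. If B instead started at offset 128, both
// would land on page 0 and the function would return true.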
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one is buffer
or linear image and another one is optimal image. If type is unknown, behave
conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
// Fills structure with parameters of an example buffer to be used for transfers
// during GPU defragmentation.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to the first element that is greater
or equal to (key), according to comparison (cmp).

Cmp should return true if the first argument is less than the second argument.

Returned value is the found element, if present in the collection, or the place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
// Returns true if all pointers in the array are not-null and unique.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
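// Illustrative usage sketch (hypothetical type name): pairing vma_new with
// vma_delete so that caller-provided VkAllocationCallbacks are honored for both
// the allocation and the constructor/destructor calls.
//
//     struct ExamplePayload { int value; ExamplePayload(int v) : value(v) { } };
//     ExamplePayload* p = vma_new(pAllocationCallbacks, ExamplePayload)(42);
//     // ...
//     vma_delete(pAllocationCallbacks, p); // Calls ~ExamplePayload(), then VmaFree().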
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator that forwards all allocations to VmaMalloc/VmaFree.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
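// Illustrative usage sketch: VmaStlAllocator plugs into allocator-aware
// containers, so every container allocation goes through VmaMalloc/VmaFree and
// thus through the user's VkAllocationCallbacks if provided.
//
//     VmaStlAllocator<int> alloc(pAllocationCallbacks);
//     VmaVector<int, VmaStlAllocator<int> > v(alloc); // VmaVector is defined below
//     v.push_back(7);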
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
4503 #else // #if VMA_USE_STL_VECTOR
4508 template<
typename T,
typename AllocatorT>
4512 typedef T value_type;
4514 VmaVector(
const AllocatorT& allocator) :
4515 m_Allocator(allocator),
4522 VmaVector(
size_t count,
const AllocatorT& allocator) :
4523 m_Allocator(allocator),
4524 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4532 VmaVector(
size_t count,
const T& value,
const AllocatorT& allocator)
4533 : VmaVector(count, allocator) {}
4535 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4536 m_Allocator(src.m_Allocator),
4537 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4538 m_Count(src.m_Count),
4539 m_Capacity(src.m_Count)
4543 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
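// Illustrative usage sketch: keeping a VmaVector sorted with the helpers above.
// VmaSuballocationOffsetLess (defined later in this file) is a typical CmpLess.
//
//     VmaVectorInsertSorted<VmaSuballocationOffsetLess>(vec, suballoc); // insert in order
//     VmaVectorRemoveSorted<VmaSuballocationOffsetLess>(vec, suballoc); // binary-search and remove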
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has a free item: Create a new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
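// Illustrative usage sketch (hypothetical item type): this allocator trades a
// linear search over blocks in Free() for O(1) allocation from per-block free
// lists. The library instantiates it e.g. as VmaPoolAllocator<VmaListItem<T>>
// inside VmaRawList below.
//
//     VmaPoolAllocator<MyItem> pool(pAllocationCallbacks, 128); // first block: 128 items
//     MyItem* item = pool.Alloc(/* MyItem constructor args */);
//     pool.Free(item); // returns the slot to its block's free list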
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
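// Illustrative usage sketch: VmaList mirrors the subset of std::list used by
// this library.
//
//     VmaStlAllocator<int> alloc(pAllocationCallbacks);
//     VmaList<int, VmaStlAllocator<int> > list(alloc);
//     list.push_back(1);
//     for(VmaList<int, VmaStlAllocator<int> >::iterator it = list.begin(); it != list.end(); ++it)
//     {
//         // *it visits each stored value
//     }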
/*
Class with interface similar to a subset of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
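// Illustrative usage sketch: VmaMap is a flat map over a sorted VmaVector -
// O(log n) find, O(n) insert/erase - which is sufficient for the small maps
// used in this library.
//
//     VmaStlAllocator<VmaPair<uint32_t, float> > alloc(pAllocationCallbacks);
//     VMA_MAP_TYPE(uint32_t, float) map(alloc);
//     map.insert(VmaPair<uint32_t, float>(1, 2.0f));
//     VmaPair<uint32_t, float>* found = map.find(1);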
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator.

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    If LastUseFrameIndex + frameInUseCount < currentFrameIndex, makes the
    allocation lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and
    returns true. Otherwise returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned
as an allocated memory block, or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of a planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
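// Worked example (illustrative): a request that overlaps 1 MiB of existing
// allocations to be made lost (sumItemSize = 1048576) with
// itemsToMakeLostCount = 2 has cost 1048576 + 2 * 1048576 = 3 MiB equivalent;
// the allocator prefers candidate requests with the lowest CalcCost().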
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only a single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for a suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on the request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    // For defragmentation.
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given a free suballocation, merges it with the following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free. Merges it with adjacent free
    // suballocations if applicable. Returns iterator to the new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given a free suballocation, inserts it into the sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given a free suballocation, removes it from the sorted list of
    // m_FreeSuballocationsBySize if it is suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
- GetSize() is the original size of the allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as a separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than the current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    // Doubly linked list of free nodes at each level.
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if it doesn't belong to a custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads
    simultaneously, e.g. vkMapMemory, vkBindBufferMemory.
    Also protects m_MapCount and m_pMappedData.
    Allocations, deallocations, and any change in m_pMetadata are protected by
    the parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
6373 struct VmaPointerLess
6375 bool operator()(
const void* lhs,
const void* rhs)
const
struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
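/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a specific
Vulkan memory type.

Synchronized internally with a mutex.
*/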
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////////////////////////////////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    /* There can be at most one allocation that is completely empty (except when
    minBlockCount > 0) - a hysteresis to avoid pessimistic case of alternating
    creation and destruction of a VkDeviceMemory. */
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
class VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
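/*
Performs defragmentation:

- Updates `pBlockVector->m_pMetadata`.
- Updates allocations by calling ChangeBlockAllocation() or ChangeOffset().
- Does not move actual data, only returns requested moves as `moves`.
*/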
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
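// Alternative, faster algorithm: compacts allocations toward the beginning of
// each block by rewriting block metadata directly (PreprocessMetadata() /
// PostprocessMetadata() below), reusing gaps tracked in FreeSpaceDatabase.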
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };
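    // Small fixed-size database of free ranges left behind by moves.
    // Register() remembers a range if it is large enough; Fetch() finds a
    // registered range that can hold `size` bytes at `alignment`, then shrinks
    // or retires the entry it used.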
    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    // Redundant, for convenience not to fetch from m_hCustomPool->m_BlockVector or m_hAllocator->m_pBlockVectors.
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
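// Top-level defragmentation context, exposed through VmaDefragmentationContext.
// It owns one VmaBlockVectorDefragmentationContext per involved default pool
// (memory type) and one per involved custom pool.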
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    /*
    Returns:
    - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
    - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
    - Negative value if error occurred and object can be destroyed immediately.
    */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
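// When VMA_RECORDING_ENABLED is 1, VmaRecorder writes a trace of allocator
// calls to a file, so a captured workload can be replayed and analyzed offline.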
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex,
        VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_Freq;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
// Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
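// Central bookkeeping of memory usage per heap: bytes allocated in memory
// blocks (m_BlockBytes) and bytes of actual allocations (m_AllocationBytes).
// With VMA_MEMORY_BUDGET enabled it also caches usage/budget numbers fetched
// from the VK_EXT_memory_budget extension.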
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
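// Main allocator object.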
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(
        VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(
        VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
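/*
VmaJsonWriter below emits the JSON returned by vmaBuildStatsString(). A minimal
usage sketch (illustrative only, the values are made up):

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("TotalBytes");   // key
        json.WriteNumber((uint64_t)1024); // value
        json.EndObject();
    }
    // sb.GetData() now holds the serialized JSON text.
*/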
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}
VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b':
            m_SB.Add("\\b");
            break;
        case '\f':
            m_SB.Add("\\f");
            break;
        case '\n':
            m_SB.Add("\\n");
            break;
        case '\r':
            m_SB.Add("\\r");
            break;
        case '\t':
            m_SB.Add("\\t");
            break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
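////////////////////////////////////////////////////////////////////////////////
// class VmaAllocation_T implementation

// Note: when the allocation was created with the "user data as string" flag,
// SetUserData() below stores a copy of the string; otherwise it stores the raw
// pointer without taking ownership.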
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
    default:
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        return VMA_NULL;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you're doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
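// Comparator used to keep m_FreeSuballocationsBySize sorted by size and to
// binary-search it (see the VmaBinaryFindFirstNotLess calls below).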
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize, as calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation must match the expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid - they should have been merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize must
    // match the expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
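// Tries to find a place for an allocation of given size and alignment inside
// this block, following the requested strategy. On success fills
// *pAllocationRequest. When canMakeOtherLost is true, the request may include
// existing allocations to be made lost (pAllocationRequest->itemsToMakeLostCount).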
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // New algorithm, efficiently searching freeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after this one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before this one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Now that we have final *pOffset, check if we are past suballocItem.
        // If yes, return false - this function should be called for another suballocItem as starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached.
        // Update itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, we must mark more allocations lost or fail.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margin before and after is bigger than size of this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
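// Releases the given suballocation: marks it free, updates m_FreeCount and
// m_SumFreeSize, merges it with adjacent free suballocations, and returns an
// iterator to the resulting free item.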
9199 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
9202 VmaSuballocation& suballoc = *suballocItem;
9203 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9204 suballoc.hAllocation = VK_NULL_HANDLE;
9208 m_SumFreeSize += suballoc.size;
9211 bool mergeWithNext = false;
9212 bool mergeWithPrev = false;
9214 VmaSuballocationList::iterator nextItem = suballocItem;
9216 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
9218 mergeWithNext = true;
9221 VmaSuballocationList::iterator prevItem = suballocItem;
9222 if(suballocItem != m_Suballocations.begin())
9225 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9227 mergeWithPrev = true;
9233 UnregisterFreeSuballocation(nextItem);
9234 MergeFreeWithNext(suballocItem);
9239 UnregisterFreeSuballocation(prevItem);
9240 MergeFreeWithNext(prevItem);
9241 RegisterFreeSuballocation(prevItem);
9246 RegisterFreeSuballocation(suballocItem);
9247 return suballocItem;
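// A sketch of the free-and-coalesce logic above, reduced to a plain std::list
// of ranges. Hypothetical types and names, not the VMA API; the by-size
// registration (Register/UnregisterFreeSuballocation) is deliberately left out.
#include <cstdint>
#include <iterator>
#include <list>
struct RangeSketch { uint64_t offset; uint64_t size; bool free; };
using RangeListSketch = std::list<RangeSketch>;
static RangeListSketch::iterator FreeRangeSketch(
    RangeListSketch& ranges, RangeListSketch::iterator item)
{
    item->free = true;
    RangeListSketch::iterator next = std::next(item);
    if(next != ranges.end() && next->free)   // merge the following free range in
    {
        item->size += next->size;
        ranges.erase(next);
    }
    if(item != ranges.begin())
    {
        RangeListSketch::iterator prev = std::prev(item);
        if(prev->free)                       // fold this range into the previous one
        {
            prev->size += item->size;
            ranges.erase(item);
            return prev;                     // the surviving free item, as above
        }
    }
    return item;
}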
9251 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
9253 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9254 VMA_ASSERT(item->size > 0);
9258 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9260 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9262 if(m_FreeSuballocationsBySize.empty())
9264 m_FreeSuballocationsBySize.push_back(item);
9268 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
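// What VmaVectorInsertSorted accomplishes here, shown with the standard
// library (hypothetical helper, not the VMA template): keep the free-list
// vector ordered by ascending size so best-fit queries can binary-search it.
#include <algorithm>
#include <cstdint>
#include <vector>
static void InsertSortedBySizeSketch(std::vector<uint64_t>& sizes, uint64_t value)
{
    sizes.insert(std::lower_bound(sizes.begin(), sizes.end(), value), value);
}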
9276 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
9278 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9279 VMA_ASSERT(item->size > 0);
9283 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9285 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9287 VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
9288 m_FreeSuballocationsBySize.data(),
9289 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
9291 VmaSuballocationItemSizeLess());
9292 for(size_t index = it - m_FreeSuballocationsBySize.data();
9293 index < m_FreeSuballocationsBySize.size();
9296 if(m_FreeSuballocationsBySize[index] == item)
9298 VmaVectorRemove(m_FreeSuballocationsBySize, index);
9301 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
9303 VMA_ASSERT(0 && "Not found.");
9309 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
9310 VkDeviceSize bufferImageGranularity,
9311 VmaSuballocationType& inOutPrevSuballocType) const
9313 if(bufferImageGranularity == 1 || IsEmpty())
9318 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
9319 bool typeConflictFound = false;
9320 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
9321 it != m_Suballocations.cend();
9324 const VmaSuballocationType suballocType = it->type;
9325 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
9327 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
9328 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
9330 typeConflictFound = true;
9332 inOutPrevSuballocType = suballocType;
9336 return typeConflictFound || minAlignment >= bufferImageGranularity;
9342 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
9343 VmaBlockMetadata(hAllocator),
9345 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9346 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9347 m_1stVectorIndex(0),
9348 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
9349 m_1stNullItemsBeginCount(0),
9350 m_1stNullItemsMiddleCount(0),
9351 m_2ndNullItemsCount(0)
9355 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
9359 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
9361 VmaBlockMetadata::Init(size);
9362 m_SumFreeSize = size;
9365 bool VmaBlockMetadata_Linear::Validate() const
9367 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9368 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9370 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
9371 VMA_VALIDATE(!suballocations1st.empty() ||
9372 suballocations2nd.empty() ||
9373 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
9375 if(!suballocations1st.empty())
9378 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
9380 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
9382 if(!suballocations2nd.empty())
9385 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
9388 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
9389 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
9391 VkDeviceSize sumUsedSize = 0;
9392 const size_t suballoc1stCount = suballocations1st.size();
9393 VkDeviceSize offset = VMA_DEBUG_MARGIN;
9395 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9397 const size_t suballoc2ndCount = suballocations2nd.size();
9398 size_t nullItem2ndCount = 0;
9399 for(size_t i = 0; i < suballoc2ndCount; ++i)
9401 const VmaSuballocation& suballoc = suballocations2nd[i];
9402 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9404 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9405 VMA_VALIDATE(suballoc.offset >= offset);
9409 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9410 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9411 sumUsedSize += suballoc.size;
9418 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9421 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9424 for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
9426 const VmaSuballocation& suballoc = suballocations1st[i];
9427 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
9428 suballoc.hAllocation == VK_NULL_HANDLE);
9431 size_t nullItem1stCount = m_1stNullItemsBeginCount;
9433 for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
9435 const VmaSuballocation& suballoc = suballocations1st[i];
9436 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9438 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9439 VMA_VALIDATE(suballoc.offset >= offset);
9440 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
9444 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9445 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9446 sumUsedSize += suballoc.size;
9453 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9455 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
9457 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9459 const size_t suballoc2ndCount = suballocations2nd.size();
9460 size_t nullItem2ndCount = 0;
9461 for(size_t i = suballoc2ndCount; i--; )
9463 const VmaSuballocation& suballoc = suballocations2nd[i];
9464 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9466 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9467 VMA_VALIDATE(suballoc.offset >= offset);
9471 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9472 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9473 sumUsedSize += suballoc.size;
9480 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9483 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9486 VMA_VALIDATE(offset <= GetSize());
9487 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
9492 size_t VmaBlockMetadata_Linear::GetAllocationCount() const
9494 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9495 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
9498 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
9500 const VkDeviceSize size = GetSize();
9512 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9514 switch(m_2ndVectorMode)
9516 case SECOND_VECTOR_EMPTY:
9522 const size_t suballocations1stCount = suballocations1st.size();
9523 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9524 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9525 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9527 firstSuballoc.offset,
9528 size - (lastSuballoc.offset + lastSuballoc.size));
9532 case SECOND_VECTOR_RING_BUFFER:
9537 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9538 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9539 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9540 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9544 case SECOND_VECTOR_DOUBLE_STACK:
9549 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9550 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9551 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9552 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
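// Note: in each of the three modes the result is a single subtraction.
// Worked example with hypothetical numbers: block size 1024,
// SECOND_VECTOR_DOUBLE_STACK, 1st vector ending at offset 300 + size 100,
// top of the 2nd stack at offset 800 — the largest untouched range is
// 800 - (300 + 100) = 400 bytes between the two stacks.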
9562 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
9564 const VkDeviceSize size = GetSize();
9565 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9566 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9567 const size_t suballoc1stCount = suballocations1st.size();
9568 const size_t suballoc2ndCount = suballocations2nd.size();
9579 VkDeviceSize lastOffset = 0;
9581 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9583 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9584 size_t nextAlloc2ndIndex = 0;
9585 while(lastOffset < freeSpace2ndTo1stEnd)
9588 while(nextAlloc2ndIndex < suballoc2ndCount &&
9589 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9591 ++nextAlloc2ndIndex;
9595 if(nextAlloc2ndIndex < suballoc2ndCount)
9597 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9600 if(lastOffset < suballoc.offset)
9603 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9617 lastOffset = suballoc.offset + suballoc.size;
9618 ++nextAlloc2ndIndex;
9624 if(lastOffset < freeSpace2ndTo1stEnd)
9626 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9634 lastOffset = freeSpace2ndTo1stEnd;
9639 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9640 const VkDeviceSize freeSpace1stTo2ndEnd =
9641 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9642 while(lastOffset < freeSpace1stTo2ndEnd)
9645 while(nextAlloc1stIndex < suballoc1stCount &&
9646 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9648 ++nextAlloc1stIndex;
9652 if(nextAlloc1stIndex < suballoc1stCount)
9654 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9657 if(lastOffset < suballoc.offset)
9660 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9674 lastOffset = suballoc.offset + suballoc.size;
9675 ++nextAlloc1stIndex;
9681 if(lastOffset < freeSpace1stTo2ndEnd)
9683 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9691 lastOffset = freeSpace1stTo2ndEnd;
9695 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9697 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9698 while(lastOffset < size)
9701 while(nextAlloc2ndIndex != SIZE_MAX &&
9702 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9704 --nextAlloc2ndIndex;
9708 if(nextAlloc2ndIndex != SIZE_MAX)
9710 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9713 if(lastOffset < suballoc.offset)
9716 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9730 lastOffset = suballoc.offset + suballoc.size;
9731 --nextAlloc2ndIndex;
9737 if(lastOffset < size)
9739 const VkDeviceSize unusedRangeSize = size - lastOffset;
9755 void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
9757 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9758 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9759 const VkDeviceSize size = GetSize();
9760 const size_t suballoc1stCount = suballocations1st.size();
9761 const size_t suballoc2ndCount = suballocations2nd.size();
9763 inoutStats.size += size;
9765 VkDeviceSize lastOffset = 0;
9767 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9769 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9770 size_t nextAlloc2ndIndex = 0; // 2nd vector is indexed from 0, same as in CalcAllocationStatInfo and PrintDetailedMap
9771 while(lastOffset < freeSpace2ndTo1stEnd)
9774 while(nextAlloc2ndIndex < suballoc2ndCount &&
9775 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9777 ++nextAlloc2ndIndex;
9781 if(nextAlloc2ndIndex < suballoc2ndCount)
9783 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9786 if(lastOffset < suballoc.offset)
9789 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9800 lastOffset = suballoc.offset + suballoc.size;
9801 ++nextAlloc2ndIndex;
9806 if(lastOffset < freeSpace2ndTo1stEnd)
9809 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9816 lastOffset = freeSpace2ndTo1stEnd;
9821 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9822 const VkDeviceSize freeSpace1stTo2ndEnd =
9823 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9824 while(lastOffset < freeSpace1stTo2ndEnd)
9827 while(nextAlloc1stIndex < suballoc1stCount &&
9828 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9830 ++nextAlloc1stIndex;
9834 if(nextAlloc1stIndex < suballoc1stCount)
9836 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9839 if(lastOffset < suballoc.offset)
9842 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9853 lastOffset = suballoc.offset + suballoc.size;
9854 ++nextAlloc1stIndex;
9859 if(lastOffset < freeSpace1stTo2ndEnd)
9862 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9869 lastOffset = freeSpace1stTo2ndEnd;
9873 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9875 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9876 while(lastOffset < size)
9879 while(nextAlloc2ndIndex != SIZE_MAX &&
9880 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9882 --nextAlloc2ndIndex;
9886 if(nextAlloc2ndIndex != SIZE_MAX)
9888 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9891 if(lastOffset < suballoc.offset)
9894 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9905 lastOffset = suballoc.offset + suballoc.size;
9906 --nextAlloc2ndIndex;
9911 if(lastOffset < size)
9914 const VkDeviceSize unusedRangeSize = size - lastOffset;
9927 #if VMA_STATS_STRING_ENABLED
9928 void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
9930 const VkDeviceSize size = GetSize();
9931 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9932 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9933 const size_t suballoc1stCount = suballocations1st.size();
9934 const size_t suballoc2ndCount = suballocations2nd.size();
9938 size_t unusedRangeCount = 0;
9939 VkDeviceSize usedBytes = 0;
9941 VkDeviceSize lastOffset = 0;
9943 size_t alloc2ndCount = 0;
9944 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9946 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9947 size_t nextAlloc2ndIndex = 0;
9948 while(lastOffset < freeSpace2ndTo1stEnd)
9951 while(nextAlloc2ndIndex < suballoc2ndCount &&
9952 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9954 ++nextAlloc2ndIndex;
9958 if(nextAlloc2ndIndex < suballoc2ndCount)
9960 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9963 if(lastOffset < suballoc.offset)
9972 usedBytes += suballoc.size;
9975 lastOffset = suballoc.offset + suballoc.size;
9976 ++nextAlloc2ndIndex;
9981 if(lastOffset < freeSpace2ndTo1stEnd)
9988 lastOffset = freeSpace2ndTo1stEnd;
9993 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9994 size_t alloc1stCount = 0;
9995 const VkDeviceSize freeSpace1stTo2ndEnd =
9996 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9997 while(lastOffset < freeSpace1stTo2ndEnd)
10000 while(nextAlloc1stIndex < suballoc1stCount &&
10001 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10003 ++nextAlloc1stIndex;
10007 if(nextAlloc1stIndex < suballoc1stCount)
10009 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10012 if(lastOffset < suballoc.offset)
10015 ++unusedRangeCount;
10021 usedBytes += suballoc.size;
10024 lastOffset = suballoc.offset + suballoc.size;
10025 ++nextAlloc1stIndex;
10030 if(lastOffset < size)
10033 ++unusedRangeCount;
10037 lastOffset = freeSpace1stTo2ndEnd;
10041 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10043 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10044 while(lastOffset < size)
10047 while(nextAlloc2ndIndex != SIZE_MAX &&
10048 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10050 --nextAlloc2ndIndex;
10054 if(nextAlloc2ndIndex != SIZE_MAX)
10056 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10059 if(lastOffset < suballoc.offset)
10062 ++unusedRangeCount;
10068 usedBytes += suballoc.size;
10071 lastOffset = suballoc.offset + suballoc.size;
10072 --nextAlloc2ndIndex;
10077 if(lastOffset < size)
10080 ++unusedRangeCount;
10089 const VkDeviceSize unusedBytes = size - usedBytes;
10090 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
10095 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10097 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10098 size_t nextAlloc2ndIndex = 0;
10099 while(lastOffset < freeSpace2ndTo1stEnd)
10102 while(nextAlloc2ndIndex < suballoc2ndCount &&
10103 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10105 ++nextAlloc2ndIndex;
10109 if(nextAlloc2ndIndex < suballoc2ndCount)
10111 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10114 if(lastOffset < suballoc.offset)
10117 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10118 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10123 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10126 lastOffset = suballoc.offset + suballoc.size;
10127 ++nextAlloc2ndIndex;
10132 if(lastOffset < freeSpace2ndTo1stEnd)
10135 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10136 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10140 lastOffset = freeSpace2ndTo1stEnd;
10145 nextAlloc1stIndex = m_1stNullItemsBeginCount;
10146 while(lastOffset < freeSpace1stTo2ndEnd)
10149 while(nextAlloc1stIndex < suballoc1stCount &&
10150 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10152 ++nextAlloc1stIndex;
10156 if(nextAlloc1stIndex < suballoc1stCount)
10158 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10161 if(lastOffset < suballoc.offset)
10164 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10165 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10170 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10173 lastOffset = suballoc.offset + suballoc.size;
10174 ++nextAlloc1stIndex;
10179 if(lastOffset < freeSpace1stTo2ndEnd)
10182 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10183 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10187 lastOffset = freeSpace1stTo2ndEnd;
10191 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10193 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10194 while(lastOffset < size)
10197 while(nextAlloc2ndIndex != SIZE_MAX &&
10198 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10200 --nextAlloc2ndIndex;
10204 if(nextAlloc2ndIndex != SIZE_MAX)
10206 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10209 if(lastOffset < suballoc.offset)
10212 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10213 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10218 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10221 lastOffset = suballoc.offset + suballoc.size;
10222 --nextAlloc2ndIndex;
10227 if(lastOffset < size)
10230 const VkDeviceSize unusedRangeSize = size - lastOffset;
10231 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10240 PrintDetailedMap_End(json);
10242 #endif // #if VMA_STATS_STRING_ENABLED
10244 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10245 uint32_t currentFrameIndex,
10246 uint32_t frameInUseCount,
10247 VkDeviceSize bufferImageGranularity,
10248 VkDeviceSize allocSize,
10249 VkDeviceSize allocAlignment,
10251 VmaSuballocationType allocType,
10252 bool canMakeOtherLost,
10254 VmaAllocationRequest* pAllocationRequest)
10256 VMA_ASSERT(allocSize > 0);
10257 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10258 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10259 VMA_HEAVY_ASSERT(Validate());
10260 return upperAddress ?
10261 CreateAllocationRequest_UpperAddress(
10262 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10263 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10264 CreateAllocationRequest_LowerAddress(
10265 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10266 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
10269 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10270 uint32_t currentFrameIndex,
10271 uint32_t frameInUseCount,
10272 VkDeviceSize bufferImageGranularity,
10273 VkDeviceSize allocSize,
10274 VkDeviceSize allocAlignment,
10275 VmaSuballocationType allocType,
10276 bool canMakeOtherLost,
10278 VmaAllocationRequest* pAllocationRequest)
10280 const VkDeviceSize size = GetSize();
10281 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10282 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10284 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10286 VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
10291 if(allocSize > size)
10295 VkDeviceSize resultBaseOffset = size - allocSize;
10296 if(!suballocations2nd.empty())
10298 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10299 resultBaseOffset = lastSuballoc.offset - allocSize;
10300 if(allocSize > lastSuballoc.offset)
10307 VkDeviceSize resultOffset = resultBaseOffset;
10310 if(VMA_DEBUG_MARGIN > 0)
10312 if(resultOffset < VMA_DEBUG_MARGIN)
10316 resultOffset -= VMA_DEBUG_MARGIN;
10320 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
10324 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10326 bool bufferImageGranularityConflict = false;
10327 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10329 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10330 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10332 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
10334 bufferImageGranularityConflict = true;
10342 if(bufferImageGranularityConflict)
10344 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
10349 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
10350 suballocations1st.back().offset + suballocations1st.back().size :
10352 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
10356 if(bufferImageGranularity > 1)
10358 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10360 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10361 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10363 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
10377 pAllocationRequest->offset = resultOffset;
10378 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
10379 pAllocationRequest->sumItemSize = 0;
10381 pAllocationRequest->itemsToMakeLostCount = 0;
10382 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
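// A sketch of the top-down placement above, stripped of granularity and
// lost-allocation handling (hypothetical helper, not VMA code; alignment
// assumed to be a power of two, as Vulkan alignments are):
#include <cstdint>
static bool PlaceUpperSketch(
    uint64_t blockSize, uint64_t endOf1st, uint64_t allocSize,
    uint64_t alignment, uint64_t margin, uint64_t& outOffset)
{
    if(allocSize > blockSize)
        return false;
    uint64_t offset = blockSize - allocSize;   // start flush with the top
    if(offset < margin)
        return false;
    offset -= margin;                          // leave the debug margin above
    offset &= ~(alignment - 1);                // VmaAlignDown
    if(endOf1st + margin > offset)             // would collide with the 1st vector
        return false;
    outOffset = offset;
    return true;
}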
10389 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
10390 uint32_t currentFrameIndex,
10391 uint32_t frameInUseCount,
10392 VkDeviceSize bufferImageGranularity,
10393 VkDeviceSize allocSize,
10394 VkDeviceSize allocAlignment,
10395 VmaSuballocationType allocType,
10396 bool canMakeOtherLost,
10398 VmaAllocationRequest* pAllocationRequest)
10400 const VkDeviceSize size = GetSize();
10401 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10402 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10404 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10408 VkDeviceSize resultBaseOffset = 0;
10409 if(!suballocations1st.empty())
10411 const VmaSuballocation& lastSuballoc = suballocations1st.back();
10412 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10416 VkDeviceSize resultOffset = resultBaseOffset;
10419 if(VMA_DEBUG_MARGIN > 0)
10421 resultOffset += VMA_DEBUG_MARGIN;
10425 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10429 if(bufferImageGranularity > 1 && !suballocations1st.empty())
10431 bool bufferImageGranularityConflict = false;
10432 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10434 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10435 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10437 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10439 bufferImageGranularityConflict = true;
10447 if(bufferImageGranularityConflict)
10449 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10453 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
10454 suballocations2nd.back().offset : size;
10457 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
10461 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10463 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10465 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10466 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10468 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10482 pAllocationRequest->offset = resultOffset;
10483 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10484 pAllocationRequest->sumItemSize = 0;
10486 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10487 pAllocationRequest->itemsToMakeLostCount = 0;
10494 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10496 VMA_ASSERT(!suballocations1st.empty());
10498 VkDeviceSize resultBaseOffset = 0;
10499 if(!suballocations2nd.empty())
10501 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10502 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10506 VkDeviceSize resultOffset = resultBaseOffset;
10509 if(VMA_DEBUG_MARGIN > 0)
10511 resultOffset += VMA_DEBUG_MARGIN;
10515 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10519 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10521 bool bufferImageGranularityConflict = false;
10522 for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10524 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10525 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10527 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10529 bufferImageGranularityConflict = true;
10537 if(bufferImageGranularityConflict)
10539 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10543 pAllocationRequest->itemsToMakeLostCount = 0;
10544 pAllocationRequest->sumItemSize = 0;
10545 size_t index1st = m_1stNullItemsBeginCount;
10547 if(canMakeOtherLost)
10549 while(index1st < suballocations1st.size() &&
10550 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10553 const VmaSuballocation& suballoc = suballocations1st[index1st];
10554 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10560 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10561 if(suballoc.hAllocation->CanBecomeLost() &&
10562 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10564 ++pAllocationRequest->itemsToMakeLostCount;
10565 pAllocationRequest->sumItemSize += suballoc.size;
10577 if(bufferImageGranularity > 1)
10579 while(index1st < suballocations1st.size())
10581 const VmaSuballocation& suballoc = suballocations1st[index1st];
10582 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10584 if(suballoc.hAllocation != VK_NULL_HANDLE)
10587 if(suballoc.hAllocation->CanBecomeLost() &&
10588 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10590 ++pAllocationRequest->itemsToMakeLostCount;
10591 pAllocationRequest->sumItemSize += suballoc.size;
10609 if(index1st == suballocations1st.size() &&
10610 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10613 VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
10618 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10619 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10623 if(bufferImageGranularity > 1)
10625 for(size_t nextSuballocIndex = index1st;
10626 nextSuballocIndex < suballocations1st.size();
10627 nextSuballocIndex++)
10629 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10630 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10632 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10646 pAllocationRequest->offset = resultOffset;
10647 pAllocationRequest->sumFreeSize =
10648 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10650 - pAllocationRequest->sumItemSize;
10651 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
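// A sketch of the forward "bump" placement shared by both branches above:
// start right past the previous allocation, add the debug margin, align up,
// and accept only if the request still ends below the free-space end
// (hypothetical helper, not VMA code; power-of-two alignment assumed):
#include <cstdint>
static bool PlaceLowerSketch(
    uint64_t prevEnd, uint64_t freeSpaceEnd, uint64_t allocSize,
    uint64_t alignment, uint64_t margin, uint64_t& outOffset)
{
    uint64_t offset = prevEnd + margin;
    offset = (offset + alignment - 1) & ~(alignment - 1);  // VmaAlignUp
    if(offset + allocSize + margin > freeSpaceEnd)
        return false;
    outOffset = offset;
    return true;
}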
10660 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10661 uint32_t currentFrameIndex,
10662 uint32_t frameInUseCount,
10663 VmaAllocationRequest* pAllocationRequest)
10665 if(pAllocationRequest->itemsToMakeLostCount == 0)
10670 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10673 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10674 size_t index = m_1stNullItemsBeginCount;
10675 size_t madeLostCount = 0;
10676 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
10678 if(index == suballocations->size())
10682 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10684 suballocations = &AccessSuballocations2nd();
10688 VMA_ASSERT(!suballocations->empty());
10690 VmaSuballocation& suballoc = (*suballocations)[index];
10691 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10693 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10694 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10695 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10697 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10698 suballoc.hAllocation = VK_NULL_HANDLE;
10699 m_SumFreeSize += suballoc.size;
10700 if(suballocations == &AccessSuballocations1st())
10702 ++m_1stNullItemsMiddleCount;
10706 ++m_2ndNullItemsCount;
10718 CleanupAfterFree();
10724 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10726 uint32_t lostAllocationCount = 0;
10728 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10729 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10731 VmaSuballocation& suballoc = suballocations1st[i];
10732 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10733 suballoc.hAllocation->CanBecomeLost() &&
10734 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10736 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10737 suballoc.hAllocation = VK_NULL_HANDLE;
10738 ++m_1stNullItemsMiddleCount;
10739 m_SumFreeSize += suballoc.size;
10740 ++lostAllocationCount;
10744 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10745 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10747 VmaSuballocation& suballoc = suballocations2nd[i];
10748 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10749 suballoc.hAllocation->CanBecomeLost() &&
10750 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10752 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10753 suballoc.hAllocation = VK_NULL_HANDLE;
10754 ++m_2ndNullItemsCount;
10755 m_SumFreeSize += suballoc.size;
10756 ++lostAllocationCount;
10760 if(lostAllocationCount)
10762 CleanupAfterFree();
10765 return lostAllocationCount;
10768 VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
10770 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10771 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10773 const VmaSuballocation& suballoc = suballocations1st[i];
10774 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10776 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10778 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10779 return VK_ERROR_VALIDATION_FAILED_EXT;
10781 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10783 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10784 return VK_ERROR_VALIDATION_FAILED_EXT;
10789 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10790 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10792 const VmaSuballocation& suballoc = suballocations2nd[i];
10793 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10795 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10797 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10798 return VK_ERROR_VALIDATION_FAILED_EXT;
10800 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10802 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10803 return VK_ERROR_VALIDATION_FAILED_EXT;
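// The corruption check reduced to its core, as a standalone sketch
// (hypothetical names and pattern value; the real magic constant lives
// elsewhere in this file): the margin around every allocation is pre-filled
// with a known 32-bit pattern, and CheckCorruption simply re-reads it.
#include <cstdint>
static const uint32_t kMagicSketch = 0xDEADC0DE; // illustrative value only
static void WriteMagicSketch(void* blockData, uint64_t offset, uint64_t marginBytes)
{
    uint32_t* dst = reinterpret_cast<uint32_t*>(static_cast<char*>(blockData) + offset);
    for(uint64_t i = 0; i < marginBytes / sizeof(uint32_t); ++i)
        dst[i] = kMagicSketch;
}
static bool ValidateMagicSketch(const void* blockData, uint64_t offset, uint64_t marginBytes)
{
    const uint32_t* src = reinterpret_cast<const uint32_t*>(
        static_cast<const char*>(blockData) + offset);
    for(uint64_t i = 0; i < marginBytes / sizeof(uint32_t); ++i)
        if(src[i] != kMagicSketch)
            return false; // something wrote past an allocation boundary
    return true;
}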
10811 void VmaBlockMetadata_Linear::Alloc(
10812 const VmaAllocationRequest& request,
10813 VmaSuballocationType type,
10814 VkDeviceSize allocSize,
10817 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10819 switch(request.type)
10821 case VmaAllocationRequestType::UpperAddress:
10823 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10824 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10825 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10826 suballocations2nd.push_back(newSuballoc);
10827 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10830 case VmaAllocationRequestType::EndOf1st:
10832 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10834 VMA_ASSERT(suballocations1st.empty() ||
10835 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10837 VMA_ASSERT(request.offset + allocSize <= GetSize());
10839 suballocations1st.push_back(newSuballoc);
10842 case VmaAllocationRequestType::EndOf2nd:
10844 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10846 VMA_ASSERT(!suballocations1st.empty() &&
10847 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10848 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10850 switch(m_2ndVectorMode)
10852 case SECOND_VECTOR_EMPTY:
10854 VMA_ASSERT(suballocations2nd.empty());
10855 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10857 case SECOND_VECTOR_RING_BUFFER:
10859 VMA_ASSERT(!suballocations2nd.empty());
10861 case SECOND_VECTOR_DOUBLE_STACK:
10862 VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10868 suballocations2nd.push_back(newSuballoc);
10872 VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
10875 m_SumFreeSize -= newSuballoc.size;
10878 void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
10880 FreeAtOffset(allocation->GetOffset());
10883 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10885 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10886 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10888 if(!suballocations1st.empty())
10891 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10892 if(firstSuballoc.offset == offset)
10894 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10895 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10896 m_SumFreeSize += firstSuballoc.size;
10897 ++m_1stNullItemsBeginCount;
10898 CleanupAfterFree();
10904 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10905 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10907 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10908 if(lastSuballoc.offset == offset)
10910 m_SumFreeSize += lastSuballoc.size;
10911 suballocations2nd.pop_back();
10912 CleanupAfterFree();
10917 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10919 VmaSuballocation& lastSuballoc = suballocations1st.back();
10920 if(lastSuballoc.offset == offset)
10922 m_SumFreeSize += lastSuballoc.size;
10923 suballocations1st.pop_back();
10924 CleanupAfterFree();
10931 VmaSuballocation refSuballoc;
10932 refSuballoc.offset = offset;
10934 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10935 suballocations1st.begin() + m_1stNullItemsBeginCount,
10936 suballocations1st.end(),
10938 VmaSuballocationOffsetLess());
10939 if(it != suballocations1st.end())
10941 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10942 it->hAllocation = VK_NULL_HANDLE;
10943 ++m_1stNullItemsMiddleCount;
10944 m_SumFreeSize += it->size;
10945 CleanupAfterFree();
10950 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10953 VmaSuballocation refSuballoc;
10954 refSuballoc.offset = offset;
10956 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10957 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10958 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10959 if(it != suballocations2nd.end())
10961 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10962 it->hAllocation = VK_NULL_HANDLE;
10963 ++m_2ndNullItemsCount;
10964 m_SumFreeSize += it->size;
10965 CleanupAfterFree();
10970 VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
10973 bool VmaBlockMetadata_Linear::ShouldCompact1st() const
10975 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10976 const size_t suballocCount = AccessSuballocations1st().size();
10977 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
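// Note: the heuristic fires once the vector is non-trivial (more than 32
// items) and null items outnumber live ones at least 3:2. Worked example with
// hypothetical numbers: 40 items of which 25 are null —
// 25 * 2 = 50 >= (40 - 25) * 3 = 45, so the 1st vector gets compacted.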
10980 void VmaBlockMetadata_Linear::CleanupAfterFree()
10982 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10983 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10987 suballocations1st.clear();
10988 suballocations2nd.clear();
10989 m_1stNullItemsBeginCount = 0;
10990 m_1stNullItemsMiddleCount = 0;
10991 m_2ndNullItemsCount = 0;
10992 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10996 const size_t suballoc1stCount = suballocations1st.size();
10997 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10998 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
11001 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11002 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11004 ++m_1stNullItemsBeginCount;
11005 --m_1stNullItemsMiddleCount;
11009 while(m_1stNullItemsMiddleCount > 0 &&
11010 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11012 --m_1stNullItemsMiddleCount;
11013 suballocations1st.pop_back();
11017 while(m_2ndNullItemsCount > 0 &&
11018 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11020 --m_2ndNullItemsCount;
11021 suballocations2nd.pop_back();
11025 while(m_2ndNullItemsCount > 0 &&
11026 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
11028 --m_2ndNullItemsCount;
11029 VmaVectorRemove(suballocations2nd, 0);
11032 if(ShouldCompact1st())
11034 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
11035 size_t srcIndex = m_1stNullItemsBeginCount;
11036 for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
11038 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
11042 if(dstIndex != srcIndex)
11044 suballocations1st[dstIndex] = suballocations1st[srcIndex];
11048 suballocations1st.resize(nonNullItemCount);
11049 m_1stNullItemsBeginCount = 0;
11050 m_1stNullItemsMiddleCount = 0;
11054 if(suballocations2nd.empty())
11056 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11060 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
11062 suballocations1st.clear();
11063 m_1stNullItemsBeginCount = 0;
11065 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11068 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11069 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
11070 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
11071 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11073 ++m_1stNullItemsBeginCount;
11074 --m_1stNullItemsMiddleCount;
11076 m_2ndNullItemsCount = 0;
11077 m_1stVectorIndex ^= 1;
11082 VMA_HEAVY_ASSERT(Validate());
11089 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
11090 VmaBlockMetadata(hAllocator),
11092 m_AllocationCount(0),
11096 memset(m_FreeList, 0, sizeof(m_FreeList));
11099 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
11101 DeleteNode(m_Root);
11104 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
11106 VmaBlockMetadata::Init(size);
11108 m_UsableSize = VmaPrevPow2(size);
11109 m_SumFreeSize = m_UsableSize;
11113 while(m_LevelCount < MAX_LEVELS &&
11114 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
11119 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
11120 rootNode->offset = 0;
11121 rootNode->type = Node::TYPE_FREE;
11122 rootNode->parent = VMA_NULL;
11123 rootNode->buddy = VMA_NULL;
11126 AddToFreeListFront(0, rootNode);
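// Why Init() calls VmaPrevPow2: every buddy node must split into two equal
// halves, so the usable size is the block size rounded down to a power of
// two and the remainder is reported as unusable space. A standalone sketch
// of the rounding (hypothetical helper; assumes v > 0):
#include <cstdint>
static uint64_t PrevPow2Sketch(uint64_t v)
{
    v |= v >> 1;  v |= v >> 2;  v |= v >> 4;   // smear the highest set bit
    v |= v >> 8;  v |= v >> 16; v |= v >> 32;  // down through all lower bits
    return v - (v >> 1);                       // keep only the highest bit
}
// PrevPow2Sketch(1000) == 512: a 1000-byte block keeps 488 unusable bytes.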
11129 bool VmaBlockMetadata_Buddy::Validate() const
11132 ValidationContext ctx;
11133 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
11135 VMA_VALIDATE(false && "ValidateNode failed.");
11137 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
11138 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
11141 for(uint32_t level = 0; level < m_LevelCount; ++level)
11143 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
11144 m_FreeList[level].front->free.prev == VMA_NULL);
11146 for(Node* node = m_FreeList[level].front;
11148 node = node->free.next)
11150 VMA_VALIDATE(node->type == Node::TYPE_FREE);
11152 if(node->free.next == VMA_NULL)
11154 VMA_VALIDATE(m_FreeList[level].back == node);
11158 VMA_VALIDATE(node->free.next->free.prev == node);
11164 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
11166 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
11172 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
11174 for(uint32_t level = 0; level < m_LevelCount; ++level)
11176 if(m_FreeList[level].front != VMA_NULL)
11178 return LevelToNodeSize(level);
11184 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
11186 const VkDeviceSize unusableSize = GetUnusableSize();
11197 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
11199 if(unusableSize > 0)
11208 void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
11210 const VkDeviceSize unusableSize = GetUnusableSize();
11212 inoutStats.size += GetSize();
11213 inoutStats.unusedSize += m_SumFreeSize + unusableSize;
11218 if(unusableSize > 0)
11225 #if VMA_STATS_STRING_ENABLED
11227 void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
11231 CalcAllocationStatInfo(stat);
11233 PrintDetailedMap_Begin(
11239 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
11241 const VkDeviceSize unusableSize = GetUnusableSize();
11242 if(unusableSize > 0)
11244 PrintDetailedMap_UnusedRange(json,
11249 PrintDetailedMap_End(json);
11252 #endif // #if VMA_STATS_STRING_ENABLED
11254 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
11255 uint32_t currentFrameIndex,
11256 uint32_t frameInUseCount,
11257 VkDeviceSize bufferImageGranularity,
11258 VkDeviceSize allocSize,
11259 VkDeviceSize allocAlignment,
11261 VmaSuballocationType allocType,
11262 bool canMakeOtherLost,
11264 VmaAllocationRequest* pAllocationRequest)
11266 VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
11270 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
11271 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
11272 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
11274 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
11275 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
11278 if(allocSize > m_UsableSize)
11283 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11284 for(uint32_t level = targetLevel + 1; level--; )
11286 for(Node* freeNode = m_FreeList[level].front;
11287 freeNode != VMA_NULL;
11288 freeNode = freeNode->free.next)
11290 if(freeNode->offset % allocAlignment == 0)
11292 pAllocationRequest->type = VmaAllocationRequestType::Normal;
11293 pAllocationRequest->offset = freeNode->offset;
11294 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
11295 pAllocationRequest->sumItemSize = 0;
11296 pAllocationRequest->itemsToMakeLostCount = 0;
11297 pAllocationRequest->customData = (void*)(uintptr_t)level;
11306 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
11307 uint32_t currentFrameIndex,
11308 uint32_t frameInUseCount,
11309 VmaAllocationRequest* pAllocationRequest)
11315 return pAllocationRequest->itemsToMakeLostCount == 0;
11318 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
11327 void VmaBlockMetadata_Buddy::Alloc(
11328 const VmaAllocationRequest& request,
11329 VmaSuballocationType type,
11330 VkDeviceSize allocSize,
11333 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
11335 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11336 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
11338 Node* currNode = m_FreeList[currLevel].front;
11339 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11340 while(currNode->offset != request.offset)
11342 currNode = currNode->free.next;
11343 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11347 while(currLevel < targetLevel)
11351 RemoveFromFreeList(currLevel, currNode);
11353 const uint32_t childrenLevel = currLevel + 1;
11356 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
11357 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
11359 leftChild->offset = currNode->offset;
11360 leftChild->type = Node::TYPE_FREE;
11361 leftChild->parent = currNode;
11362 leftChild->buddy = rightChild;
11364 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
11365 rightChild->type = Node::TYPE_FREE;
11366 rightChild->parent = currNode;
11367 rightChild->buddy = leftChild;
11370 currNode->type = Node::TYPE_SPLIT;
11371 currNode->split.leftChild = leftChild;
11374 AddToFreeListFront(childrenLevel, rightChild);
11375 AddToFreeListFront(childrenLevel, leftChild);
11380 currNode = m_FreeList[currLevel].front;
11389 VMA_ASSERT(currLevel == targetLevel &&
11390 currNode != VMA_NULL &&
11391 currNode->type == Node::TYPE_FREE);
11392 RemoveFromFreeList(currLevel, currNode);
11395 currNode->type = Node::TYPE_ALLOCATION;
11396 currNode->allocation.alloc = hAllocation;
11398 ++m_AllocationCount;
11400 m_SumFreeSize -= allocSize;
11403 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
11405 if(node->type == Node::TYPE_SPLIT)
11407 DeleteNode(node->split.leftChild->buddy);
11408 DeleteNode(node->split.leftChild);
11411 vma_delete(GetAllocationCallbacks(), node);
11414 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
11416 VMA_VALIDATE(level < m_LevelCount);
11417 VMA_VALIDATE(curr->parent == parent);
11418 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
11419 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
11422 case Node::TYPE_FREE:
11424 ctx.calculatedSumFreeSize += levelNodeSize;
11425 ++ctx.calculatedFreeCount;
11427 case Node::TYPE_ALLOCATION:
11428 ++ctx.calculatedAllocationCount;
11429 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
11430 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
11432 case Node::TYPE_SPLIT:
11434 const uint32_t childrenLevel = level + 1;
11435 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
11436 const Node* const leftChild = curr->split.leftChild;
11437 VMA_VALIDATE(leftChild != VMA_NULL);
11438 VMA_VALIDATE(leftChild->offset == curr->offset);
11439 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
11441 VMA_VALIDATE(false && "ValidateNode for left child failed.");
11443 const Node* const rightChild = leftChild->buddy;
11444 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
11445 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
11447 VMA_VALIDATE(false && "ValidateNode for right child failed.");
11458 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
11461 uint32_t level = 0;
11462 VkDeviceSize currLevelNodeSize = m_UsableSize;
11463 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
11464 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
11467 currLevelNodeSize = nextLevelNodeSize;
11468 nextLevelNodeSize = currLevelNodeSize >> 1;
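// The level walk above as a standalone sketch (hypothetical names): descend
// from the root while the request still fits in a half-size child.
#include <cstdint>
static uint32_t AllocSizeToLevelSketch(
    uint64_t allocSize, uint64_t usableSize, uint32_t levelCount)
{
    uint32_t level = 0;
    uint64_t nodeSize = usableSize;
    while(allocSize <= nodeSize / 2 && level + 1 < levelCount)
    {
        nodeSize /= 2;
        ++level;
    }
    return level;
}
// E.g. usableSize 512, request 100: it fits in 256 and in 128 but not in 64,
// so the request is served from a 128-byte node at level 2.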
11473 void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
11476 Node* node = m_Root;
11477 VkDeviceSize nodeOffset = 0;
11478 uint32_t level = 0;
11479 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
11480 while(node->type == Node::TYPE_SPLIT)
11482 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11483 if(offset < nodeOffset + nextLevelSize)
11485 node = node->split.leftChild;
11489 node = node->split.leftChild->buddy;
11490 nodeOffset += nextLevelSize;
11493 levelNodeSize = nextLevelSize;
11496 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11497 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11500 --m_AllocationCount;
11501 m_SumFreeSize += alloc->GetSize();
11503 node->type = Node::TYPE_FREE;
11506 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11508 RemoveFromFreeList(level, node->buddy);
11509 Node* const parent = node->parent;
11511 vma_delete(GetAllocationCallbacks(), node->buddy);
11512 vma_delete(GetAllocationCallbacks(), node);
11513 parent->type = Node::TYPE_FREE;
11521 AddToFreeListFront(level, node);
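// Note: the descent above picks the child purely by offset math — at a split
// node covering [nodeOffset, nodeOffset + nodeSize), the left child owns the
// lower half. A one-line sketch of that decision (hypothetical helper):
#include <cstdint>
static bool DescendsLeftSketch(uint64_t offset, uint64_t nodeOffset, uint64_t nodeSize)
{
    return offset < nodeOffset + nodeSize / 2; // left child owns the lower half
}
// Once the leaf is freed, the while(level > 0 ...) loop above re-merges the
// node with its buddy, level by level, for as long as both halves are free.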
11524 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
11528 case Node::TYPE_FREE:
11534 case Node::TYPE_ALLOCATION:
11536 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11542 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11543 if(unusedRangeSize > 0)
11552 case Node::TYPE_SPLIT:
11554 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11555 const Node* const leftChild = node->split.leftChild;
11556 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11557 const Node* const rightChild = leftChild->buddy;
11558 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11566 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11568 VMA_ASSERT(node->type == Node::TYPE_FREE);
11571 Node* const frontNode = m_FreeList[level].front;
11572 if(frontNode == VMA_NULL)
11574 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11575 node->free.prev = node->free.next = VMA_NULL;
11576 m_FreeList[level].front = m_FreeList[level].back = node;
11580 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11581 node->free.prev = VMA_NULL;
11582 node->free.next = frontNode;
11583 frontNode->free.prev = node;
11584 m_FreeList[level].front = node;
11588 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11590 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11593 if(node->free.prev == VMA_NULL)
11595 VMA_ASSERT(m_FreeList[level].front == node);
11596 m_FreeList[level].front = node->free.next;
11600 Node* const prevFreeNode = node->free.prev;
11601 VMA_ASSERT(prevFreeNode->free.next == node);
11602 prevFreeNode->free.next = node->free.next;
11606 if(node->free.next == VMA_NULL)
11608 VMA_ASSERT(m_FreeList[level].back == node);
11609 m_FreeList[level].back = node->free.prev;
11613 Node* const nextFreeNode = node->free.next;
11614 VMA_ASSERT(nextFreeNode->free.prev == node);
11615 nextFreeNode->free.prev = node->free.prev;
11619 #if VMA_STATS_STRING_ENABLED
11620 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
11624 case Node::TYPE_FREE:
11625 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11627 case Node::TYPE_ALLOCATION:
11629 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11630 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11631 if(allocSize < levelNodeSize)
11633 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11637 case Node::TYPE_SPLIT:
11639 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11640 const Node* const leftChild = node->split.leftChild;
11641 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11642 const Node* const rightChild = leftChild->buddy;
11643 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11650 #endif // #if VMA_STATS_STRING_ENABLED
11656 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
11657 m_pMetadata(VMA_NULL),
11658 m_MemoryTypeIndex(UINT32_MAX),
11660 m_hMemory(VK_NULL_HANDLE),
11662 m_pMappedData(VMA_NULL)
11666 void VmaDeviceMemoryBlock::Init(
11669 uint32_t newMemoryTypeIndex,
11670 VkDeviceMemory newMemory,
11671 VkDeviceSize newSize,
11673 uint32_t algorithm)
11675 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11677 m_hParentPool = hParentPool;
11678 m_MemoryTypeIndex = newMemoryTypeIndex;
11680 m_hMemory = newMemory;
11685 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11688 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11694 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11696 m_pMetadata->Init(newSize);
11699 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
11703 VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
11705 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11706 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11707 m_hMemory = VK_NULL_HANDLE;
11709 vma_delete(allocator, m_pMetadata);
11710 m_pMetadata = VMA_NULL;
11713 bool VmaDeviceMemoryBlock::Validate() const
11715 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11716 (m_pMetadata->GetSize() != 0));
11718 return m_pMetadata->Validate();
11721 VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
11723 void* pData = nullptr;
11724 VkResult res = Map(hAllocator, 1, &pData);
11725 if(res != VK_SUCCESS)
11730 res = m_pMetadata->CheckCorruption(pData);
11732 Unmap(hAllocator, 1);
11737 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
11744 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11745 if(m_MapCount != 0)
11747 m_MapCount += count;
11748 VMA_ASSERT(m_pMappedData != VMA_NULL);
11749 if(ppData != VMA_NULL)
11751 *ppData = m_pMappedData;
11757 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11758 hAllocator->m_hDevice,
11764 if(result == VK_SUCCESS)
11766 if(ppData != VMA_NULL)
11768 *ppData = m_pMappedData;
11770 m_MapCount = count;
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
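/*
With VMA_DEBUG_MARGIN > 0 and VMA_DEBUG_DETECT_CORRUPTION enabled, every
suballocation of size S at offset O is bracketed by magic numbers:

    [ ... | margin | allocation, S bytes | margin | ... ]
             ^ magic at O - VMA_DEBUG_MARGIN       ^ magic at O + S

WriteMagicValueAroundAllocation() stamps both values when the allocation is
created; ValidateMagicValueAroundAllocation() re-checks them on free, and the
public vmaCheckCorruption() does the same for all live allocations, so an
out-of-bounds write by the application is caught at the latest when the
allocation is freed.
*/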
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
static void InitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}
void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}

VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_Algorithm == 0 || m_Algorithm == VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer.
    // Which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    /*
    Under certain condition, this whole section can be skipped for optimization, so
    we move on directly to trying to allocate with canMakeOtherLost. That is the case
    e.g. for custom pools with linear algorithm.
    */
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock,
                        currentFrameIndex,
                        size,
                        alignment,
                        allocFlagsCopy,
                        createInfo.pUserData,
                        suballocType,
                        strategy,
                        pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock,
                    currentFrameIndex,
                    size,
                    alignment,
                    allocFlagsCopy,
                    createInfo.pUserData,
                    suballocType,
                    strategy,
                    pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }

    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this pBlock.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        /* Maximum number of tries exceeded - a very unlikely situation. It might be caused
        by many other threads simultaneously touching allocations from this block vector. */
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
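/*
A worked example of the block size heuristic above: with the default preferred
block size of 256 MiB and an empty default pool, the first block is created at
32 MiB (NEW_BLOCK_SIZE_SHIFT_MAX = 3 halvings, each taken only while the smaller
size still exceeds every existing block and holds the requested size twice), and
subsequent blocks grow through 64 and 128 to 256 MiB - so small workloads never
commit a full 256 MiB up front. Symmetrically, when vkAllocateMemory fails or
the heap budget is short, the size is halved down to 1/8 of the preferred size
before the attempt is abandoned with VK_ERROR_OUT_OF_DEVICE_MEMORY.
*/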
void VmaBlockVector::Free(VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block - we don't want two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: keep this one empty block - leave it for reuse.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block. Deferred until this point, outside of mutex
    // lock, for performance reason.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
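/*
This is deliberately a single bubble-sort step: at most one swap per call. Run
after every Free(), it keeps m_Blocks approximately sorted by ascending free
space - the order the best-fit forward search in AllocatePage() relies on -
without paying for a full sort on every deallocation.
*/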
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;
    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memmove(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    // Regardless of pDefragCtx->res == VK_SUCCESS.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
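/*
A worked example of the invalidate/flush rounding above, which is required
because ranges passed to vkInvalidateMappedMemoryRanges / vkFlushMappedMemoryRanges
must respect VkPhysicalDeviceLimits::nonCoherentAtomSize. With
nonCoherentAtomSize = 64, a move of size 100 at srcOffset 70 produces
offset = VmaAlignDown(70, 64) = 64 and
size = VmaAlignUp(100 + (70 - 64), 64) = VmaAlignUp(106, 64) = 128,
clamped so the range never runs past the end of the block.
*/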
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
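/*
The GPU path aliases each VkDeviceMemory block touched by a move with a
temporary buffer spanning the whole block (created with TRANSFER_SRC and
TRANSFER_DST usage via VmaFillGpuDefragmentationBufferCreateInfo), so every move
can be recorded as a plain vkCmdCopyBuffer into the user-provided command
buffer. res is left at VK_NOT_READY to signal that the copies are only recorded,
not executed; the temporary buffers are destroyed later in DefragmentationEnd(),
after the application has submitted and synchronized the command buffer.
*/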
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
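/*
Sketch of the JSON this emits for a custom pool (shape only, values
illustrative):

    {
      "MemoryTypeIndex": 7,
      "BlockSize": 16777216,
      "BlockCount": { "Min": 1, "Max": 8, "Cur": 2 },
      "Blocks": {
        "0": { ...detailed map of block 0... },
        "3": { ...detailed map of block 3... }
      }
    }

Default pools print "PreferredBlockSize" instead of the pool-specific fields.
*/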
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
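/*
When a memory type could be defragmented both ways, the heuristic above prefers
the GPU path for DEVICE_LOCAL memory (reading it back through a mapped pointer
would be slow) and on integrated GPUs (where all memory is device-local anyway);
everything else falls back to the CPU memmove path, which is also the only path
that supports overlapping moves.
*/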
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
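/*
ProcessDefragmentations()/CommitDefragmentations() back the incremental,
pass-based public API. A minimal sketch, assuming `allocator` is a valid
VmaAllocator and `ctx` came from vmaDefragmentationBegin() with
VMA_DEFRAGMENTATION_FLAG_INCREMENTAL set:

    for(;;)
    {
        VmaDefragmentationPassMoveInfo moveBuf[64];
        VmaDefragmentationPassInfo passInfo = {};
        passInfo.moveCount = 64;
        passInfo.pMoves = moveBuf;
        vmaBeginDefragmentationPass(allocator, ctx, &passInfo);
        // Recreate buffers/images at passInfo.pMoves[i].memory + .offset and
        // copy their contents here...
        if(vmaEndDefragmentationPass(allocator, ctx) == VK_SUCCESS)
            break; // VK_NOT_READY means more passes are needed.
    }
*/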
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now, as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
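/*
A move "makes sense" exactly when it strictly lowers the allocation's position
in the lexicographic order (blockIndex, offset): into an earlier block, or to a
lower offset within the same block. This makes every round monotonic - data only
flows toward the front - so DefragmentRound() cannot move the same allocation
back and forth.
*/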
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // only by less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.

                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
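/*
The fast algorithm is a single linear sweep with two cursors: a read cursor that
visits every suballocation in source order, and a write cursor
(dstBlockInfoIndex/dstOffset) that packs them front to back, with the
FreeSpaceDatabase recycling the holes that had to be skipped over. This is why
it is only selected (in VmaBlockVectorDefragmentationContext::Begin) when all
allocations are movable, VMA_DEBUG_MARGIN is 0 and no buffer/image granularity
conflicts are possible.
*/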
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            //pMetadata->m_SumFreeSize is already set to blockSize.
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // TODO: Optimize somehow. Remember iterator instead of searching for it linearly.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}
void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}

void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /********************************
    HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
    ********************************/

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are movable.
    - There is no possibility of image/buffer granularity conflict.
    - The defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentators. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation, just remember the limits here.
        // The real work happens in DefragmentPassBegin() / DefragmentPassEnd().
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;
        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
            return VK_NOT_READY;
    }
    else
    {
        if(commandBuffer == VK_NULL_HANDLE)
        {
            maxGpuBytesToMove = 0;
            maxGpuAllocationsToMove = 0;
        }

        VkResult res = VK_SUCCESS;

        // Process default pools.
        for(uint32_t memTypeIndex = 0;
            memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
            ++memTypeIndex)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
            if(pBlockVectorCtx)
            {
                VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx, pStats, flags,
                    maxCpuBytesToMove, maxCpuAllocationsToMove,
                    maxGpuBytesToMove, maxGpuAllocationsToMove,
                    commandBuffer);
                if(pBlockVectorCtx->res != VK_SUCCESS)
                {
                    res = pBlockVectorCtx->res;
                }
            }
        }

        // Process custom pools.
        for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
            customCtxIndex < customCtxCount && res >= VK_SUCCESS;
            ++customCtxIndex)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
            VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }

        return res;
    }

    return VK_SUCCESS;
}
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx, m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);
                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;
                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx, pCurrentMove, movesLeft);
            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);
            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;
            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx, pCurrentMove, movesLeft);
        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
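// Example (not part of the library): a minimal sketch of driving incremental
// defragmentation from user code via the pass API implemented above. The
// names `allocator` and `allocations`, the batch size, and the re-binding
// step are placeholders/assumptions.
//
//     VmaDefragmentationInfo2 defragInfo = {};
//     defragInfo.flags = VMA_DEFRAGMENTATION_FLAG_INCREMENTAL;
//     defragInfo.allocationCount = (uint32_t)allocations.size();
//     defragInfo.pAllocations = allocations.data();
//     defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
//     defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
//     VmaDefragmentationContext defragCtx = VMA_NULL;
//     vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
//     for(;;)
//     {
//         VmaDefragmentationPassMoveInfo moves[64];
//         VmaDefragmentationPassInfo passInfo = {};
//         passInfo.moveCount = 64;
//         passInfo.pMoves = moves;
//         vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
//         // ... copy data for each returned move and re-bind resources ...
//         if(vmaEndDefragmentationPass(allocator, defragCtx) == VK_SUCCESS)
//             break; // All moves committed.
//     }
//     vmaDefragmentationEnd(allocator, defragCtx);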
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header: file signature, then format version.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n",
        callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex, createInfo.flags, createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount, (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount, pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n",
        callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags,
        createInfo.memoryTypeBits, createInfo.pool, allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,",
        callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags,
        createInfo.memoryTypeBits, createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n",
        callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags,
        createInfo.memoryTypeBits, createInfo.pool, allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n",
        callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags, createInfo.usage,
        createInfo.requiredFlags, createInfo.preferredFlags,
        createInfo.memoryTypeBits, createInfo.pool, allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation, offset, size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n",
        callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n",
        callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    if(info.poolCount)
    {
        fprintf(m_File, "%p", info.pPools[0]);
        for(uint32_t i = 1; i < info.poolCount; ++i)
            fprintf(m_File, " %p", info.pPools[i]);
    }
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
            m_Str = (const char*)pUserData;
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
        m_Str = "";
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    // With Vulkan 1.1 the functionality of these extensions is core, so the extensions are not needed.
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }
    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call m_pBlockVectors[memTypeIndex]->CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
    }
#endif
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL
    // If these asserts are hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
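// Example (not part of the library): a minimal sketch of supplying the
// function pointers manually, e.g. when VK_NO_PROTOTYPES is defined and entry
// points come from a loader. Only a few members are shown.
//
//     VmaVulkanFunctions vulkanFunctions = {};
//     vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
//     vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
//     vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
//     vulkanFunctions.vkFreeMemory = vkFreeMemory;
//     // ... fill the remaining members the same way ...
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     allocatorInfo.pVulkanFunctions = &vulkanFunctions;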
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
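// Worked example (assuming the default macro values): with
// VMA_SMALL_HEAP_MAX_SIZE = 1 GiB and VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB,
// a 256 MiB heap counts as "small", so its preferred block size is
// 256 MiB / 8 = 32 MiB, while an 8 GiB heap gets the full 256 MiB default
// (or VmaAllocatorCreateInfo::preferredLargeHeapBlockSize if overridden).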
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristics: allocate dedicated memory if requested size is greater than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedImage,
            allocationCount, pAllocations);
    }

    VkResult res = blockVector->Allocate(
        m_CurrentFrameIndex.load(),
        size, alignment, finalCreateInfo, suballocType,
        allocationCount, pAllocations);
    if(res == VK_SUCCESS)
    {
        return res;
    }

    // Block vector allocation failed: try dedicated memory, unless new allocations are forbidden.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    res = AllocateDedicatedMemory(
        size, suballocType, memTypeIndex,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
        finalCreateInfo.pUserData,
        dedicatedBuffer, dedicatedImage,
        allocationCount, pAllocations);
    if(res == VK_SUCCESS)
    {
        VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
        return VK_SUCCESS;
    }

    // Everything failed: return error code.
    VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            allocInfo.pNext = &dedicatedAllocInfo;
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex, allocInfo, map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the allocations in m_pDedicatedAllocations.
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice, hMemory, 0, VK_WHOLE_SIZE, 0, &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        memReq2.pNext = &memDedicatedReq;

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer, dedicatedImage,
                createInfo, memTypeIndex, suballocType,
                allocationCount, pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }

            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove old memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer, dedicatedImage,
                        createInfo, memTypeIndex, suballocType,
                        allocationCount, pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // else: allocation failed here too - try the next type in the next iteration.
                }
                // No other matching memory type index could be found.
                else
                {
                    // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
            return res;
    }
}
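// Example (not part of the library): the most common client-side path into the
// allocation logic above, shown as a minimal sketch with placeholder sizes.
//
//     VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
//     bufferInfo.size = 65536;
//     bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
//
//     VmaAllocationCreateInfo allocInfo = {};
//     allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
//
//     VkBuffer buffer;
//     VmaAllocation allocation;
//     vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, VMA_NULL);
//     // ... use the buffer ...
//     vmaDestroyBuffer(allocator, buffer, allocation);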
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It's left for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
        }
    }
}
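// Example (not part of the library): a minimal sketch of polling the budget
// from user code once per frame. `allocator`, `frameIndex`, and `memProps`
// are placeholders.
//
//     vmaSetCurrentFrameIndex(allocator, frameIndex); // refreshes the budget periodically
//     VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
//     vmaGetBudget(allocator, budget);
//     for(uint32_t heap = 0; heap < memProps.memoryHeapCount; ++heap)
//     {
//         printf("Heap %u: %llu / %llu bytes used\n", heap,
//             (unsigned long long)budget[heap].usage,
//             (unsigned long long)budget[heap].budget);
//     }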
// 4098 = 0x1002 is the PCI vendor ID of AMD.
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    // If the defragmentation completed immediately, the context is not needed anymore.
    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(VmaDefragmentationPassInfo* pInfo, VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        // Read the info atomically against a concurrent MakeLost() on another thread.
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
                return false;
            else if(localLastUseFrameIndex == localCurrFrameIndex)
                return true;
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                    localLastUseFrameIndex = localCurrFrameIndex;
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
                break;
            else if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                localLastUseFrameIndex = localCurrFrameIndex;
        }
#endif
        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
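// Example (not part of the library): a minimal sketch of creating a custom
// pool for a specific memory type. `memTypeIndex` would normally come from
// vmaFindMemoryTypeIndex() or one of its ForBufferInfo/ForImageInfo variants.
//
//     VmaPoolCreateInfo poolInfo = {};
//     poolInfo.memoryTypeIndex = memTypeIndex;
//     poolInfo.blockSize = 64ull * 1024 * 1024; // 64 MiB blocks
//     poolInfo.maxBlockCount = 4;
//
//     VmaPool pool;
//     VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);
//     // ... allocate with VmaAllocationCreateInfo::pool = pool ...
//     vmaDestroyPool(allocator, pool);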
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}
VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}

VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
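// Example (not part of the library): corruption detection only does anything
// when the implementation is compiled with margins enabled, e.g.:
//
//     #define VMA_DEBUG_MARGIN 16
//     #define VMA_DEBUG_DETECT_CORRUPTION 1
//     #define VMA_IMPLEMENTATION
//     #include "vk_mem_alloc.h"
//
// It can then be invoked for all memory types at once:
//
//     VkResult res = vmaCheckCorruption(allocator, UINT32_MAX);
//     // VK_SUCCESS => margins validated; VK_ERROR_FEATURE_NOT_PRESENT =>
//     // nothing to check; any other error code => corruption was detected.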
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
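// Example (not part of the library): the heap size limit enforced by the CAS
// loop above is configured at allocator creation, e.g. capping heap 0 to
// 1 GiB (values are placeholders):
//
//     VkDeviceSize heapSizeLimits[VK_MAX_MEMORY_HEAPS];
//     for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
//         heapSizeLimits[i] = VK_WHOLE_SIZE; // VK_WHOLE_SIZE = no limit
//     heapSizeLimits[0] = 1ull * 1024 * 1024 * 1024;
//
//     VmaAllocatorCreateInfo allocatorInfo = {};
//     // ... fill physicalDevice, device, instance ...
//     allocatorInfo.pHeapSizeLimit = heapSizeLimits;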
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
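/*
A hedged sketch of the public mapping API that routes into Map()/Unmap() above.
Assumes `allocator` and a host-visible allocation `alloc` already exist. Note
that block mappings are reference-counted per VkDeviceMemory block, so nested
map/unmap pairs on allocations from the same block are cheap.

    void* pData = VMA_NULL;
    VkResult res = vmaMapMemory(allocator, alloc, &pData);
    if(res == VK_SUCCESS)
    {
        memcpy(pData, mySrcData, mySrcDataSize); // mySrcData/mySrcDataSize are illustrative
        vmaUnmapMemory(allocator, alloc);
    }
*/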
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;
        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
            break;
        }
        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
}
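/*
A hedged sketch of flushing a sub-range of a mapped, non-coherent allocation
after a CPU write, relying on the offset/size handling above. `allocator`,
`alloc`, `srcData`, and `srcSize` are assumed to exist in the caller.

    void* pData = VMA_NULL;
    if(vmaMapMemory(allocator, alloc, &pData) == VK_SUCCESS)
    {
        memcpy(pData, srcData, srcSize);
        // Flush only the written range; the range is expanded to
        // nonCoherentAtomSize boundaries internally. Passing VK_WHOLE_SIZE
        // as `size` flushes from `offset` to the end of the allocation.
        vmaFlushAllocation(allocator, alloc, 0, srcSize);
        vmaUnmapMemory(allocator, alloc);
    }
*/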
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    memProps.pNext = &budgetProps;

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
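/*
A hedged sketch of the polling pattern this function is built for: advance the
frame index once per frame so the allocator periodically refreshes the
VK_EXT_memory_budget numbers, then read and react to them. `allocator` and
`frameIndex` are assumed to exist in the caller.

    vmaSetCurrentFrameIndex(allocator, ++frameIndex);

    const VkPhysicalDeviceMemoryProperties* memProps = VMA_NULL;
    vmaGetMemoryProperties(allocator, &memProps);

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budgets);
    for(uint32_t heapIndex = 0; heapIndex < memProps->memoryHeapCount; ++heapIndex)
    {
        if(budgets[heapIndex].usage >= budgets[heapIndex].budget * 9 / 10)
        {
            // Over ~90% of budget on this heap: consider freeing resources
            // or deferring new allocations. The threshold is illustrative.
        }
    }
*/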
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// Public interface

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();

            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
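/*
A hedged usage sketch for the pair above: build the JSON stats string, consume
it, and return it to the allocator, which allocated it with vma_new_array(len + 1).
`allocator` is assumed to exist in the caller.

    char* statsJson = VMA_NULL;
    vmaBuildStatsString(allocator, &statsJson, VK_TRUE); // VK_TRUE: include detailed map
    if(statsJson != VMA_NULL)
    {
        fprintf(stderr, "%s\n", statsJson);
        vmaFreeStatsString(allocator, statsJson);
    }
*/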
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this
                // memory type, plus number of notPreferredFlags bits that are present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
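/*
A hedged example of the cost-based search above: pick a host-visible memory
type for staging. Every requiredFlags bit must be present; each preferredFlags
bit missing from a type and each notPreferred bit present in it adds 1 to the
cost, and the lowest-cost type wins. `allocator` is assumed to exist.

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY; // implies HOST_VISIBLE | HOST_COHERENT required

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
    // res == VK_ERROR_FEATURE_NOT_PRESENT if no type satisfies requiredFlags.
*/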
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
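/*
A hedged sketch of calling vmaAllocateMemoryPages(): `allocationCount` separate
allocations are made from one set of requirements, e.g. as backing pages for a
sparse resource. The sizes below are illustrative, not from the library.

    VkMemoryRequirements memReq = {};
    memReq.size = 65536;
    memReq.alignment = 65536;
    memReq.memoryTypeBits = UINT32_MAX; // allow any memory type

    VmaAllocationCreateInfo createInfo = {};
    createInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation pages[8] = {};
    VkResult res = vmaAllocateMemoryPages(allocator, &memReq, &createInfo, 8, pages, VMA_NULL);
    if(res == VK_SUCCESS)
    {
        // ... bind pages to the sparse resource ...
        vmaFreeMemoryPages(allocator, 8, pages);
    }
*/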
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    const VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}
VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // Make sure alignment requirements for specific buffer usages reported
        // in Physical Device Properties are included in alignment reported by memory requirements.
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
        }
        if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
        {
            VMA_ASSERT(vkMemReq.alignment %
                allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
        }

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
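/*
A hedged end-to-end sketch of calling vmaCreateBuffer(), matching the cleanup
contract above (on failure both *pBuffer and *pAllocation come back
VK_NULL_HANDLE). The sizes and usage flags are illustrative; `allocator` is
assumed to exist in the caller.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    VkBuffer buf = VK_NULL_HANDLE;
    VmaAllocation alloc = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    if(res == VK_SUCCESS)
    {
        // ... use buf ...
        vmaDestroyBuffer(allocator, buf, alloc); // destroys buffer and frees its memory
    }
*/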
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION