#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H

#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VK_EXT_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
    // Members of struct VmaVulkanFunctions: pointers to extension functions, fetched by the user or by the library.
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
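/*
Illustrative usage sketch (not part of the library): how a user might point the
allocator at statically linked Vulkan functions before creating it. The handle
variables `physicalDevice`, `device`, and `instance` are hypothetical.

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vulkanFunctions.vkAllocateMemory = vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = vkFreeMemory;
    // ... remaining members filled the same way ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;
    allocatorInfo.instance = instance;
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;

    VmaAllocator allocator;
    vmaCreateAllocator(&allocatorInfo, &allocator);
*/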
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
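/*
Illustrative sketch (not part of the library): the vmaFindMemoryTypeIndex* family
can be used to pick the memoryTypeIndex for a custom pool. `allocator` is a
hypothetical, already created VmaAllocator.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_ONLY;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
    // On success, memTypeIndex can be used e.g. in VmaPoolCreateInfo::memoryTypeIndex.
*/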
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
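/*
Illustrative sketch (not part of the library), following the pattern from the
library's documentation: creating a buffer together with its memory in one call.
`allocator` is a hypothetical, already created VmaAllocator.

    VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufferInfo.size = 65536;
    bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer;
    VmaAllocation allocation;
    vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);

    // ... use the buffer ...

    vmaDestroyBuffer(allocator, buffer, allocation);
*/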
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION
#if VMA_USE_STL_CONTAINERS
   #define VMA_USE_STL_VECTOR 1
   #define VMA_USE_STL_UNORDERED_MAP 1
   #define VMA_USE_STL_LIST 1
#endif

#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus.
    // Otherwise it's always 199711L, although shared_mutex works since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
   #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
   #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
   #include <list>
#endif

#include <cstdlib>
#include <algorithm> // for min, max
#include <mutex>

#ifndef VMA_NULL
   // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
   #define VMA_NULL   nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib>
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
   #ifdef NDEBUG
       #define VMA_ASSERT(expr)
   #else
       #define VMA_ASSERT(expr)         assert(expr)
   #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
   #ifdef NDEBUG
       #define VMA_HEAVY_ASSERT(expr)
   #else
       #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
   #endif
#endif

#ifndef VMA_ALIGN_OF
   #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
   #if defined(_WIN32)
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
   #else
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size) ))
   #endif
#endif

#ifndef VMA_SYSTEM_FREE
   #if defined(_WIN32)
       #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
   #else
       #define VMA_SYSTEM_FREE(ptr)   free(ptr)
   #endif
#endif

#ifndef VMA_MIN
   #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
   #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
   #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
   #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
   #define VMA_DEBUG_LOG(format, ...)
#endif
// Converts numbers to strings for the JSON statistics dump.
#if VMA_STATS_STRING_ENABLED
    static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
    {
        snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
    }
    static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
    {
        snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
    }
    static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
    {
        snprintf(outStr, strLen, "%p", ptr);
    }
#endif
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /**
    Every allocation will have its own memory block.
    Define to 1 for debugging purposes only.
    */
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /**
    Minimum alignment of all allocations, in bytes.
    Set to more than 1 for debugging purposes only. Must be power of two.
    */
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /**
    Minimum margin before and after every allocation, in bytes.
    Set nonzero for debugging purposes only.
    */
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    /**
    Define this macro to 1 to automatically fill new allocations and destroyed
    allocations with some bit pattern.
    */
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    /**
    Define this macro to 1 together with non-zero value of VMA_DEBUG_MARGIN to
    enable writing magic value to the margin before and after every allocation and
    validating it, so that memory corruptions (out-of-bounds writes) are detected.
    */
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /**
    Set this to 1 for debugging purposes only, to enable single mutex protecting all
    entry calls to the library. Can be useful for debugging multithreading issues.
    */
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /**
    Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    Set to more than 1 for debugging purposes only. Must be power of two.
    */
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
   /// Maximum size of a memory heap in Vulkan to consider it "small".
   #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
   /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
   #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif

static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian binary 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copy of some Vulkan definitions so we don't need to check their existence in the code.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v), using a branch-free SWAR popcount.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}

/*
Returns true if given number is a power of two.
T must be unsigned integer number or signed integer but always nonnegative.
For 0 returns true.
*/
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
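// Worked examples (illustrative): VmaAlignUp(11, 8) = 16, VmaAlignDown(11, 8) = 8,
// VmaRoundDiv(7, 2) = (7 + 1) / 2 = 4, VmaIsPow2(64) = true, VmaIsPow2(12) = false.
// Note that VmaAlignUp/VmaAlignDown only assume align > 0; unlike the bit-masking
// variant used elsewhere, they do not require align to be a power of 2.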
// Returns smallest power of 2 greater or equal to v (standard bit-smearing).
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

// Lomuto-style partition: moves elements less than the pivot (last element) to the front.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Checks whether the end of resource A and the beginning of resource B fall on the
same "page" of size pageSize (typically bufferImageGranularity).
pageSize must be a power of 2. Resource A must precede resource B.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
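/*
Worked example (illustrative): with pageSize = 1024,
- resource A at offset 0, size 512 -> last byte 511, page 0;
- resource B at offset 512         -> page 0;
so VmaBlocksOnSamePage(0, 512, 512, 1024) == true, and if A and B were a linear
and an optimal resource they would have to be separated by bufferImageGranularity.
With B at offset 1024 the pages differ and the function returns false.
*/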
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity. They conflict if one of them is
a buffer or a linear image and the other one is an optimal image. If the type is
unknown, behave conservatively.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
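/*
Illustrative example: VmaIsBufferImageGranularityConflict(
VMA_SUBALLOCATION_TYPE_BUFFER, VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) returns true,
so the block metadata keeps such neighbors on separate bufferImageGranularity
pages, while two buffers (BUFFER vs BUFFER) never conflict and can be packed
tightly next to each other.
*/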
// Fills the VMA_DEBUG_MARGIN region at given offset with the corruption-detection magic value.
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

// Returns false if the magic-value margin at given offset has been overwritten.
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
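/*
Illustrative sketch (not part of the library): these RAII guards are used the same
way as std::lock_guard. `m_Mutex` and `m_UseMutex` here are hypothetical members
of some enclosing class.

    {
        VmaMutexLockRead lock(m_Mutex, m_UseMutex); // Shared lock taken here.
        // ... read shared state ...
    } // Unlocked automatically at end of scope, even on early return.
*/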
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to first element that is greater or
equal to (key), according to comparison (cmp).

Cmp should return true if first argument is less than second argument.

Returned value is the found element, if present in the collection or place where
new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
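/*
Illustrative note (not part of the library): VmaBinaryFindFirstNotLess is the
equivalent of std::lower_bound over a sorted range. For example, over sorted
VkDeviceSize values {16, 32, 64} a query for key 40 with a less-than comparator
returns an iterator to 64 - the insertion point that keeps the range sorted.
VmaBinaryFindSorted additionally verifies that the found element compares equal
to the key and returns `end` when it does not.
*/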
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
////////////////////////////////////////////////////////////////////////////////
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
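/*
Illustrative sketch (not part of the library): vma_new/vma_delete route object
construction through the user-provided VkAllocationCallbacks (or the aligned
system allocator when callbacks are null). `allocs` is a hypothetical
const VkAllocationCallbacks* that may be VMA_NULL.

    struct Foo { int x; Foo(int x) : x(x) {} };
    Foo* foo = vma_new(allocs, Foo)(42);   // Placement-new into VmaMalloc'd storage.
    vma_delete(allocs, foo);               // Explicit destructor call + VmaFree.
*/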
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
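/*
Illustrative sketch (not part of the library): VmaStlAllocator satisfies the
minimal STL allocator requirements, so the same VkAllocationCallbacks drive the
containers too - both std::vector (when VMA_USE_STL_VECTOR is 1) and the internal
VmaVector defined below. `allocs` is a hypothetical callbacks pointer.

    VmaVector< uint32_t, VmaStlAllocator<uint32_t> > v(
        VmaStlAllocator<uint32_t>(allocs));
    v.push_back(7);
*/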
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
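/*
Illustrative sketch (not part of the library): VmaVector mirrors the subset of
std::vector the implementation needs. Because elements are moved with memcpy,
it must only be instantiated with trivially copyable (POD-like) types.
`allocs` is a hypothetical VkAllocationCallbacks pointer.

    VmaVector< VkDeviceSize, VmaStlAllocator<VkDeviceSize> > sizes(
        VmaStlAllocator<VkDeviceSize>(allocs));
    sizes.push_back(256);
    sizes.insert(0, 128);     // {128, 256}
    sizes.remove(1);          // {128}
*/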
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. Number of elements that can be allocated is not bounded because
allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has free item: Create new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
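/*
Illustrative sketch (not part of the library): VmaPoolAllocator is a free-list
pool. The first block holds firstBlockCapacity items; each further block is 1.5x
larger. Alloc() pops an item off a block's free list, Free() pushes it back after
locating the owning block. `allocs` is a hypothetical callbacks pointer.

    VmaPoolAllocator<VmaSuballocation> pool(allocs, 128);
    VmaSuballocation* s = pool.Alloc(); // Constructed in pooled storage.
    pool.Free(s);                       // Destroyed; slot recycled.
*/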
////////////////////////////////////////////////////////////////////////////////
// class VmaRawList, VmaList

#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
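/*
Illustrative sketch (not part of the library): VmaList wraps VmaRawList with
std::list-like iterators; the library uses it e.g. as VmaSuballocationList.
`allocs` is a hypothetical VkAllocationCallbacks pointer.

    VmaList< int, VmaStlAllocator<int> > list(VmaStlAllocator<int>(allocs));
    list.push_back(1);
    list.push_back(2);
    for(VmaList< int, VmaStlAllocator<int> >::iterator it = list.begin();
        it != list.end(); ++it)
    {
        // *it visits 1, then 2. Items come from a VmaPoolAllocator, so
        // insertion and erasure do not hit the system heap each time.
    }
*/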
////////////////////////////////////////////////////////////////////////////////
// class VmaMap

#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with subset of interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
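/*
Illustrative sketch (not part of the library): the fallback VmaMap is a sorted
VmaVector of pairs with O(log n) find (binary search) and O(n) insert/erase,
which is adequate for the small maps used internally. `allocs` is a hypothetical
VkAllocationCallbacks pointer.

    VmaMap<uint32_t, VkDeviceSize> map(
        VmaStlAllocator< VmaPair<uint32_t, VkDeviceSize> >(allocs));
    map.insert(VmaPair<uint32_t, VkDeviceSize>(3, 256));
    VmaPair<uint32_t, VkDeviceSize>* it = map.find(3);
    if(it != map.end())
    {
        // it->second == 256
    }
*/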
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    /*
    This struct is allocated using VmaPoolAllocator.
    */

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    If the allocation has not been used for more than frameInUseCount frames
    (LastUseFrameIndex + frameInUseCount < currentFrameIndex), makes it lost by
    setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    Otherwise returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
/*
Represents a region of VmaDeviceMemoryBlock that is either assigned and returned as
allocated memory block or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparator for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
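// Worked example (illustrative): a request that would overlap 2 MiB of existing
// suballocations (sumItemSize = 2097152) and make 3 allocations lost costs
// 2097152 + 3 * VMA_LOST_ALLOCATION_COST = 2097152 + 3145728 = 5242880 "bytes".
// When several candidate placements exist, the one with the smallest CalcCost()
// is preferred, so losing live allocations is heavily penalized.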
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes actual allocation based on request. Request must be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees suballocation assigned to given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given free suballocation, it merges it with following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases given suballocation, making it free.
    // Merges it with adjacent free suballocations if applicable.
    // Returns iterator to new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given free suballocation, it inserts it into sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given free suballocation, it removes it from sorted list of
    // m_FreeSuballocationsBySize if it's suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
// Linear allocator: supports stack, double stack, and ring-buffer placement strategies.
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offset.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are upper side of double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
- GetSize() is the original size of allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as separate, unused range, not available for allocations.

Node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
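/*
Worked example (illustrative): with a 256 MiB block, level 0 is one 256 MiB node,
level 1 has 128 MiB nodes, level 2 has 64 MiB nodes, and so on
(LevelToNodeSize(level) == m_UsableSize >> level). A 40 MiB request is served
from a 64 MiB node: free nodes are split recursively until the target level is
reached, wasting 24 MiB to internal fragmentation - the classic buddy trade-off
of memory overhead for very fast allocation and freeing.
*/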
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        const VmaAllocator hAllocator,
        const VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if it doesn't belong to a custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    // Protected by the parent VmaBlockVector's mutex together with m_pMetadata.
    uint32_t m_MapCount;
    void* m_pMappedData;
};
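// Corruption-detection support: when debug margins and corruption detection
// are enabled, WriteMagicValueAroundAllocation() fills the margins just
// before and after an allocation with a known pattern, and
// ValidateMagicValueAroundAllocation() re-checks that pattern so that
// out-of-bounds writes by the application are caught - see
// VmaBlockMetadata_Generic::CheckCorruption() further below, which reports
// VK_ERROR_VALIDATION_FAILED_EXT on mismatch.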
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;

    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
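// A VmaDefragmentationMove fully describes one relocation: which block the
// allocation leaves (srcBlockIndex/srcOffset) and where it lands
// (dstBlockIndex/dstOffset). The CPU path applies these moves on mapped
// memory, while the GPU path records equivalent copy commands into the
// command buffer passed to Defragment() - see
// ApplyDefragmentationMovesCpu/Gpu in VmaBlockVector below.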
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type.

Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    ////////////////////////////
    // To be used only while the m_Mutex is locked. Used during defragmentation.

    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    // There can be at most one block that is completely empty - a hysteresis
    // to avoid pessimistic alternating creation/destruction of VkDeviceMemory.
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs a single step in sorting m_Blocks. They may not be fully
    // sorted after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void UpdateHasEmptyBlock();
};
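// One VmaBlockVector manages all VkDeviceMemory blocks of a single memory
// type, either as one of the allocator's default pools or on behalf of a
// custom VmaPool (IsCustomPool() distinguishes the two via m_hParentPool).
// New blocks are created at m_PreferredBlockSize; when that fails for lack of
// memory, smaller sizes may be tried unless explicitBlockSize is set.
// IncrementallySortBlocks() performs one sorting step per call so blocks stay
// roughly ordered by available space without a full sort on the hot path.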
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
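// Strategy of the generic algorithm: each round takes movable allocations
// from blocks at the back of m_Blocks and looks for a lower offset in an
// earlier block. Per BlockInfoCompareMoveDestination, destination blocks that
// still contain non-movable allocations and have the least free space are
// preferred, so mostly-full blocks fill up first while the tail blocks drain
// and can eventually be freed entirely.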
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
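// FreeSpaceDatabase is a tiny fixed-size cache (MAX_COUNT == 4 entries, no
// heap allocation) of known free ranges left behind while the fast algorithm
// sweeps allocations toward the front of each block. Register() ignores
// ranges smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER and
// replaces the smallest cached range, but only when it is smaller than the
// new one. Fetch() picks the candidate that leaves the most space after the
// aligned allocation, then either shrinks the entry in place or invalidates
// it (blockInfoIndex == SIZE_MAX) when the remainder is too small to keep.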
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;

    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_PtrStr[17];
        const char* m_Str;
    };

    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }
    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
};

#endif // #if VMA_RECORDING_ENABLED
/*
Thread-safe wrapper over VmaPoolAllocator free list, for allocation of
VmaAllocation_T objects.
*/
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
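// Bookkeeping semantics: m_BlockBytes tracks VkDeviceMemory actually
// allocated from each heap, while m_AllocationBytes tracks bytes occupied by
// live VmaAllocations; the difference is space reserved in blocks but not yet
// handed out. With VMA_MEMORY_BUDGET enabled, m_BlockBytesAtBudgetFetch
// remembers m_BlockBytes at the moment the VK_EXT_memory_budget numbers were
// fetched, so current usage can be estimated between fetches by adding the
// block bytes allocated since, and m_OperationsSinceBudgetFetch is used to
// decide when a re-fetch is due.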
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;

    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;

    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(VmaDefragmentationContext context);

    void DestroyPool(VmaPool pool);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
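/*
Sketch of the main allocation call flow through this class:
vmaCreateBuffer() / vmaAllocateMemory*() ->
    AllocateMemory()          - dedicated-vs-block decision, budget checks
    -> AllocateMemoryOfType() - once per candidate memory type
       -> VmaBlockVector::Allocate() on the default pool for that type, or
       -> AllocateDedicatedMemory(), which wraps AllocateVulkanMemory()
          and thus vkAllocateMemory.
Freeing reverses the route and updates m_Budget accordingly.
*/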
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
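// Note: vma_delete_array destroys elements in reverse order (i = count; i--;)
// to mirror construction order, matching the behavior of delete[] for arrays,
// then releases the whole allocation with a single VmaFree() through the
// user-provided VkAllocationCallbacks.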
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char *p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char *p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
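// AddNumber converts by peeling off the least significant digit with
// num % 10 and num /= 10, writing digits right-to-left into a stack buffer
// sized for the worst case: 10 digits for uint32_t (4294967295) and 20
// digits for uint64_t (18446744073709551615), plus a terminating '\0'.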
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b': m_SB.Add("\\b"); break;
        case '\f': m_SB.Add("\\f"); break;
        case '\n': m_SB.Add("\\n"); break;
        case '\r': m_SB.Add("\\r"); break;
        case '\t': m_SB.Add("\\t"); break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(':'); m_SB.Add(' ');
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(','); m_SB.Add(' ');
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
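// How the writer distinguishes names from values inside an object:
// StackItem::valueCount counts every emitted item, so inside
// COLLECTION_TYPE_OBJECT an even count means the next item must be a name
// (BeginValue asserts isString) and an odd count means it is the value, which
// gets prefixed with ": " instead of ", ". This yields well-formed JSON
// without any explicit state beyond the collection stack.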
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
        break;
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting the atomic to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in the owning block.
                return true;
            }
        }
    }
}

#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}

bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations as calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations as calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in
    // m_FreeSuballocationsBySize, calculated from traversing their list.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize
    // doesn't match the expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}

VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    size_t i = 0;
    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem, ++i)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from the biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.

        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex,
                    frameInUseCount,
                    bufferImageGranularity,
                    allocSize,
                    allocAlignment,
                    allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}

uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after the current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before the current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
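// Worked example of the splitting above: a free suballocation covering
// [0, 1024) receiving a request at offset 64 with allocSize 256 yields
// paddingBegin = 64 and paddingEnd = 704. Two new free suballocations are
// inserted around the used range, so m_FreeCount changes by -1 + 2 = +1, and
// m_SumFreeSize drops by exactly allocSize since the paddings remain free
// (1024 - 256 = 64 + 704).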
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from the offset equal to the beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // If final *pOffset is past suballocItem, this function should be
        // called for another suballocItem as a starting point.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached, updating itemsToMakeLostCount.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // On conflict, more allocations must be made lost or this fails.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from the offset equal to the beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Increase alignment if necessary.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                    // Already on previous page.
                    break;
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margins is bigger than this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for BufferImageGranularity conflicts.
        // On conflict, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. pOffset is already filled.
    return true;
}
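// CheckAllocation runs in two modes: with canMakeOtherLost it walks forward
// from suballocItem, summing free space (*pSumFreeSize) and the sizes of
// lost-able allocations (*pSumItemSize) until the aligned request fits,
// counting how many allocations would have to be sacrificed; without it, the
// request must fit entirely inside the single free suballocation. In both
// modes bufferImageGranularity may force extra alignment against previous
// neighbors and can veto placement next to a conflicting resource type.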
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
    {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin())
    {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext)
    {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev)
    {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else
    {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
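
/*
Illustrative sketch (not part of the library): freeing a suballocation coalesces
it with free neighbors, so the list never holds two adjacent free items. A
minimal version of the same idea over a plain std::list of ranges, assuming a
hypothetical Range type, could look like:

    #include <iterator>
    #include <list>
    struct Range { unsigned long long offset, size; bool free; };
    void CoalesceAfterFree(std::list<Range>& l, std::list<Range>::iterator it)
    {
        it->free = true;
        auto next = std::next(it);
        if(next != l.end() && next->free) { it->size += next->size; l.erase(next); }
        if(it != l.begin())
        {
            auto prev = std::prev(it);
            if(prev->free) { prev->size += it->size; l.erase(it); }
        }
    }
*/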
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        if(m_FreeSuballocationsBySize.empty()) { m_FreeSuballocationsBySize.push_back(item); }
        else { VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item); }
    }
}
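
/*
Illustrative sketch (not part of the library): m_FreeSuballocationsBySize is a
vector of list iterators kept sorted by suballocation size, so best-fit lookup
and removal are binary searches. With the standard library the equivalent
lookup would be:

    #include <algorithm>
    #include <vector>
    std::vector<unsigned long long> sizesSorted = { 16, 64, 256, 1024 };
    // First free size >= 100 (best-fit candidate) -> points at 256.
    auto it = std::lower_bound(sizesSorted.begin(), sizesSorted.end(), 100ull);
*/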
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
    {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        // Multiple free items may have equal size; scan forward for the exact one.
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index)
        {
            if(m_FreeSuballocationsBySize[index] == item)
            {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty())
    {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it)
    {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
        {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
            {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
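
/*
Illustrative sketch (not part of the library): bufferImageGranularity (from
VkPhysicalDeviceLimits) means a linear and a non-linear resource must not share
a "page" of that size. Two ranges can only conflict when they land on the same
page, e.g. with granularity = 1024:

    VkDeviceSize granularity = 1024;
    // Resource A occupies [0, 900); resource B starts at 960.
    bool samePage = ((900 - 1) / granularity) == (960 / granularity); // true -> potential conflict
    // If B instead started at 1024, it would be on the next page -> no conflict.
*/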
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear

VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty())
    {
        // Null item at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty())
    {
        // Null item at the end should be just pop_back().
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i)
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree)
        {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else
        {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; )
        {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree)
            {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else
            {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();
    // Gaps left by freed allocations inside the vectors are not considered -
    // they cannot be reused by the linear allocator anyway.
    if(IsEmpty())
    {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode)
    {
    case SECOND_VECTOR_EMPTY:
        {
            // Free space is before the first and after the last suballocation of 1st.
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }
    case SECOND_VECTOR_RING_BUFFER:
        {
            // Free space is only between the end of 2nd and the beginning of 1st.
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }
    case SECOND_VECTOR_DOUBLE_STACK:
        {
            // Free space is only between the end of 1st and the top of 2nd.
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }
    default:
        VMA_ASSERT(0);
        return 0;
    }
}
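
/*
Illustrative worked example (not part of the library), for a 1000-byte block:
- SECOND_VECTOR_EMPTY, 1st occupies [100, 300): max free range is
  max(100, 1000 - 300) = 700.
- SECOND_VECTOR_RING_BUFFER, 2nd ends at 250, 1st begins at 600: free range is
  600 - 250 = 350.
- SECOND_VECTOR_DOUBLE_STACK, 1st ends at 400, 2nd top begins at 900: free
  range is 900 - 400 = 500.
*/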
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount; outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                // 2. Process this allocation. 3. Prepare for next iteration.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                // We are at the end; process trailing free space.
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount; outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc1stIndex; }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount; outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount; outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { --nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount; outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount; outInfo.unusedBytes += unusedRangeSize;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // 1. Process free space before this allocation.
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                // 2. Process this allocation. 3. Prepare for next iteration.
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        // Find next non-null allocation or move nextAlloc1stIndex to the end.
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc1stIndex; }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            ++inoutStats.allocationCount;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            // Find next non-null allocation or move nextAlloc2ndIndex past the beginning.
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { --nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations and unused ranges.
    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc1stIndex; }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                ++unusedRangeCount;
            }
            ++alloc1stCount;
            usedBytes += suballoc.size;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { --nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: write the actual entries.
    lastOffset = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd)
        {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex < suballoc2ndCount)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < freeSpace2ndTo1stEnd)
                {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd)
    {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) { ++nextAlloc1stIndex; }
        if(nextAlloc1stIndex < suballoc1stCount)
        {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset)
            {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else
        {
            if(lastOffset < freeSpace1stTo2ndEnd)
            {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size)
        {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) { --nextAlloc2ndIndex; }
            if(nextAlloc2ndIndex != SIZE_MAX)
            {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset)
                {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else
            {
                if(lastOffset < size)
                {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
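
/*
Example usage (sketch): the upper-address path serves allocations made in a
custom pool created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT when the request
carries VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT, turning the linear block into
a double-ended stack. `myLinearPool` and `bufCreateInfo` below are assumed to
exist:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.pool = myLinearPool; // pool created with VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT
    allocCreateInfo.flags = VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT;
    // vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, nullptr);
*/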
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or at the end of the block if 2nd is empty.
    if(allocSize > size) { return false; }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty())
    {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset) { return false; }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0)
    {
        if(resultOffset < VMA_DEBUG_MARGIN) { return false; }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for BufferImageGranularity conflicts.
    // Make bigger alignment if necessary.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty())
    {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
        {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
            {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
                {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else { break; } // Already on previous page.
        }
        if(bufferImageGranularityConflict)
        {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
    {
        // Check previous suballocations for BufferImageGranularity conflicts.
        // If a conflict exists, allocation cannot be made here.
        if(bufferImageGranularity > 1)
        {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type)) { return false; }
                }
                else { break; } // Already on next page.
            }
        }

        // All tests passed: success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
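
/*
Illustrative worked example (not part of the library): upper-address placement
aligns DOWN. In a 1000-byte block with allocSize 100, alignment 64 and an empty
2nd vector:

    resultBaseOffset = 1000 - 100 = 900
    resultOffset = VmaAlignDown(900, 64) = 896   // 896 = 14 * 64

The allocation then occupies [896, 996); anything at the end of 1st must end at
or before 896 - VMA_DEBUG_MARGIN for the request to succeed.
*/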
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        // Try to allocate at the end of 1st vector.
        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0) { resultOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations1st.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else { break; } // Already on previous page.
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
            {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
                {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) { return false; }
                    }
                    else { break; } // Already on previous page.
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to the end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector as the limit.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
    {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty())
        {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0) { resultOffset += VMA_DEBUG_MARGIN; }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for BufferImageGranularity conflicts.
        // Make bigger alignment if necessary.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty())
        {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
            {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else { break; } // Already on previous page.
            }
            if(bufferImageGranularityConflict)
            {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost)
        {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
            {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else { return false; }
                }
                ++index1st;
            }

            // Check next suballocations for BufferImageGranularity conflicts.
            // If conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1)
            {
                while(index1st < suballocations1st.size())
                {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
                    {
                        if(suballoc.hAllocation != VK_NULL_HANDLE)
                        {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                            {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else { return false; }
                        }
                    }
                    else { break; } // Already on next page.
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation,
            // even after making all allocations from the 1st vector lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
            {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
        {
            // Check next suballocations for BufferImageGranularity conflicts.
            // If a conflict exists, allocation cannot be made here.
            if(bufferImageGranularity > 1)
            {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++)
                {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                    {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) { return false; }
                    }
                    else { break; } // Already on next page.
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            return true;
        }
    }

    return false;
}
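
/*
Illustrative worked example (not part of the library): ring-buffer wrap-around.
In a 1000-byte block where 1st occupies [600, 900), only 100 bytes remain at the
end, so a 150-byte request wraps around and is placed at the low end of the
block, right after the end of the 2nd vector (offset 0 if 2nd is empty). It
succeeds as long as resultOffset + 150 + VMA_DEBUG_MARGIN <= 600, the beginning
of the 1st vector.
*/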
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0)
    {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
    {
        if(index == suballocations->size())
        {
            index = 0;
            // If we get to the end of 1st, we wrap around to the beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                suballocations = &AccessSuballocations2nd();
            }
            // else: SECOND_VECTOR_EMPTY - suballocations stays pointing at 1st.
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
            {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st())
                {
                    ++m_1stNullItemsMiddleCount;
                }
                else
                {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else { return false; }
        }
        ++index;
    }

    CleanupAfterFree();
    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount)
    {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
    {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
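
/*
Example usage (sketch): these magic-value checks only run when the library is
compiled with a nonzero margin and corruption detection enabled, e.g.:

    #define VMA_DEBUG_MARGIN 16
    #define VMA_DEBUG_DETECT_CORRUPTION 1
    #include "vk_mem_alloc.h"

With that configuration, vmaCheckCorruption() / vmaCheckPoolCorruption() return
VK_ERROR_VALIDATION_FAILED_EXT when a magic value written around any allocation
has been overwritten.
*/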
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type)
    {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode)
            {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty())
    {
        // First allocation: mark it as the next empty one at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset)
        {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
    {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
    {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset)
        {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
    {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end())
        {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
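
/*
Illustrative arithmetic (not part of the library): compaction triggers once null
items dominate. nullItemCount * 2 >= nonNullItemCount * 3 means nulls make up at
least 60% of the vector. E.g. with 100 items of which 60 are null:
60 * 2 = 120 >= (100 - 60) * 3 = 120 -> compact.
*/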
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty())
    {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else
    {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
        {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
        {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st())
        {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
            {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
                {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex)
                {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty())
        {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
        {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
            {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
                {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy

VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
    {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}
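
/*
Illustrative worked example (not part of the library): for a 300 MB block,
m_UsableSize = VmaPrevPow2(300 MB) = 256 MB, and the remaining 44 MB is treated
as unusable. Level n manages nodes of size m_UsableSize >> n, so level 0 is one
256 MB node, level 1 holds 128 MB nodes, and so on down to MIN_NODE_SIZE.
*/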
bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
    {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next)
        {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL)
            {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else
            {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists of unused deeper levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
    {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}
VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level)
    {
        if(m_FreeList[level].front != VMA_NULL) { return LevelToNodeSize(level); }
    }
    return 0;
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();
    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
    if(unusableSize > 0)
    {
        // The non-power-of-2 tail beyond m_UsableSize counts as one more unused range.
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0)
    {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize: that space is not available for allocations.
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0)
    {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way of respecting bufferImageGranularity: whenever the request might be
    // an OPTIMAL image, round both alignment and size up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
    {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize)
    {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; )
    {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next)
        {
            if(freeNode->offset % allocAlignment == 0)
            {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}
bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    // Lost allocations are not supported in the buddy allocator.
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in the buddy allocator.
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset)
    {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down the tree, splitting free nodes until reaching targetLevel.
    while(currLevel < targetLevel)
    {
        // currNode is the first free node at currLevel; remove it from the free list.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to the free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;
        // currNode, as the left child of the node just split, still satisfies the
        // alignment requirement.
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT)
    {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }
    vma_delete(GetAllocationCallbacks(), node);
}
bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type)
    {
    case Node::TYPE_FREE:
        // curr->free.prev and next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
            {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    // Find the deepest level whose node size can still hold allocSize.
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
    {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
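
/*
Illustrative worked example (not part of the library): with m_UsableSize = 256
and allocSize = 40, the search proceeds 256 -> 128 -> 64 (40 fits each time) and
stops at 32 (40 no longer fits), returning the level whose node size is 64.
*/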
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level by walking down the tree.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT)
    {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize)
        {
            node = node->split.leftChild;
        }
        else
        {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes with their buddies as long as both halves are free.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE)
    {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
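
/*
Illustrative walkthrough (not part of the library): in a 256-byte block split
into four 64-byte nodes, freeing the node at offset 64 while its buddy at
offset 0 is already free merges them into one free 128-byte node; if the
sibling 128-byte node is also free, the merge cascades up to a single free
256-byte root.
*/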
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0)
            {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else
    {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}
void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else
    {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL)
    {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else
    {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type)
    {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize)
            {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaDeviceMemoryBlock

VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}
void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}
void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means some allocations were not freed before the block
    // is destroyed - i.e. unreleased VmaAllocation objects (a memory leak).
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
11778 bool VmaDeviceMemoryBlock::Validate()
const
11780 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11781 (m_pMetadata->GetSize() != 0));
11783 return m_pMetadata->Validate();
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            &m_pMappedData);
        if(result == VK_SUCCESS)
        {
            if(ppData != VMA_NULL)
            {
                *ppData = m_pMappedData;
            }
            m_MapCount = count;
        }
        return result;
    }
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
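// With VMA_DEBUG_MARGIN > 0 and VMA_DEBUG_DETECT_CORRUPTION enabled, every
// allocation is bracketed by margins stamped with a magic value:
//
//   [magic margin][allocation data][magic margin]
//                 ^allocOffset     ^allocOffset + allocSize
//
// WriteMagicValueAroundAllocation() stamps both margins when the allocation is
// created and ValidateMagicValueAroundAllocation() re-checks them when it is
// freed, which is how out-of-bounds writes just before or after an allocation
// are detected.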
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // Same locking rationale as in BindBufferMemory above.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
    memset(&outInfo, 0, sizeof(outInfo));
}
static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}
VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
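// Allocate() is transactional across the requested pages: if any AllocatePage()
// call fails, every page allocated so far is freed again and the output array is
// zeroed, so the caller never observes a partially filled pAllocations array.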
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linearAlgorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn is available only when maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    // 1. Search existing allocations. Try to allocate without making other allocations lost.
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
            for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            // Backward order in m_Blocks - prefer blocks with largest amount of free space.
            for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity, size, alignment,
                        isUpperAddress, suballocType, canMakeOtherLost, strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity, size, alignment,
                        isUpperAddress, suballocType, canMakeOtherLost, strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;
                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex, m_FrameInUseCount, &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment, size, m_MemoryTypeIndex, suballocType, mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely event when many other
        // threads are simultaneously touching allocations, making it impossible to make them lost.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
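// AllocatePage() tries its options in decreasing order of preference:
// 1. suballocate from an existing block (honoring the requested strategy),
// 2. create a new block, shrinking the preferred size down to 1/8 if needed,
// 3. as a last resort, make "lost" allocations in existing blocks reclaimable,
//    retrying up to VMA_ALLOCATION_TRY_COUNT times under concurrent churn.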
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block - we don't want two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: We now have one empty block - leave it.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
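// Note that the empty block (if any) is destroyed only after m_Mutex has been
// released: freeing VkDeviceMemory can be expensive, so it is deliberately kept
// outside the critical section.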
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    // Bubble sort only until first swap.
    for(size_t i = 1; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
        {
            VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
            return;
        }
    }
}
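// IncrementallySortBlocks() does one bubble-sort step per call. Amortized over
// many allocations and frees this keeps m_Blocks approximately sorted by
// ascending free space - which is what the forward best-fit search in
// AllocatePage() relies on - without ever paying for a full sort.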
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex, m_FrameInUseCount,
        m_BufferImageGranularity, size, alignment,
        isUpperAddress, suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from this block.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment, size, m_MemoryTypeIndex, suballocType, mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created - create a new block for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
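// When VK_KHR_buffer_device_address is in use, VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT
// has to be set on the VkDeviceMemory itself, not only on buffers placed in it -
// hence the VkMemoryAllocateFlagsInfoKHR chained into every block allocation above.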
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memcpy(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks. Unmap blocks that were mapped for defragmentation.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
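// For non-coherent memory types, the invalidate/flush ranges above must be
// aligned to nonCoherentAtomSize. For example, with nonCoherentAtomSize = 256,
// a move of 100 bytes at srcOffset = 300 is invalidated as offset =
// VmaAlignDown(300, 256) = 256 and size = VmaAlignUp(100 + (300 - 256), 256)
// = 256, clamped so the range never runs past the end of the block.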
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer for the whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Save buffers to defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}

void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.
        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
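// Heuristic recap: a memory type that qualifies for both paths is defragmented
// on the GPU when it is DEVICE_LOCAL or the GPU is integrated - in both cases a
// copy through the graphics queue is expected to be at least as fast as a
// CPU-side memcpy through a mapped pointer.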
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++i)
    {
        const VmaDefragmentationMove& move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
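// Incremental defragmentation is split into two phases: ProcessDefragmentations()
// hands pending moves out to the caller (who performs the actual copies), and
// CommitDefragmentations() later frees the source regions and repoints the
// VmaAllocation objects once the caller confirms the pass has completed.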
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make final check if this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // This strategy is a choice based on research.
    const uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;

    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size, alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pBlockInfo->m_Allocations for next round.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else if(srcBlockIndex > 0)
        {
            --srcBlockIndex;
            srcAllocIndex = SIZE_MAX;
        }
        else
        {
            return VK_SUCCESS;
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    const uint32_t roundCount = 2;
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    return dstOffset < srcOffset;
}
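// MoveMakesSense() encodes the compaction invariant: data may only move to an
// earlier block, or to a lower offset within the same block. This keeps every
// accepted move a strict improvement, so a defragmentation round cannot cycle.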
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}
VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.
    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // by only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
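// The fast algorithm is effectively a single compacting sweep: it walks all
// suballocations in source order and packs them front-to-back into the
// destination blocks, using the free-space database for holes it had to skip
// over. It only works because PreprocessMetadata() first strips the metadata
// down to allocated suballocations and PostprocessMetadata() rebuilds the free
// lists afterwards.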
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
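// PostprocessMetadata() re-derives every invariant the generic metadata relies
// on - m_FreeCount, m_SumFreeSize, the interleaved VMA_SUBALLOCATION_TYPE_FREE
// entries, and m_FreeSuballocationsBySize sorted by size - from nothing but the
// compacted list of allocations produced by the sweep above.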
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Keep the list sorted by offset: advance past all suballocations that start before this one.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end() && it->offset < suballoc.offset)
    {
        ++it;
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0;
    - all allocations in this block vector are movable;
    - there is no possibility of image/buffer granularity conflict;
    - the defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentators. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation, only remember the limits here;
        // the actual work happens in the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_NOT_READY;
        }
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
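/*
A minimal sketch of driving this context through the public API, assuming the
caller-side identifiers `allocator`, `allocs` and `allocCount` exist. With no
command buffer, only host-visible memory is defragmented and all moves are
finished by the time vmaDefragmentationBegin() returns:

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    vmaDefragmentationEnd(allocator, defragCtx);
*/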
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
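/*
A sketch of the incremental path that exercises DefragmentPassBegin above,
assuming `defragCtx` was created with VMA_DEFRAGMENTATION_FLAG_INCREMENTAL and
`allocator` exists. The caller performs the data copies itself between the
begin and end of each pass:

    VmaDefragmentationPassMoveInfo moves[64];
    for(;;)
    {
        VmaDefragmentationPassInfo passInfo = {};
        passInfo.moveCount = 64;
        passInfo.pMoves = moves;
        vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo);
        // Copy each moves[i].allocation's data into moves[i].memory at moves[i].offset here.
        if(vmaEndDefragmentationPass(allocator, defragCtx) != VK_NOT_READY)
        {
            break; // All planned moves have been committed.
        }
    }
*/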
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
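// DefragmentPassEnd() intentionally keeps returning VK_NOT_READY while any
// context still holds planned moves that were not yet committed; the caller
// should run another begin/end pass instead of treating the work as finished.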
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}

void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}

void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}

void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}

VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
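/*
Recording is opt-in at allocator creation time. A minimal sketch, assuming
`allocatorInfo` is the VmaAllocatorCreateInfo being filled out elsewhere:

    VmaRecordSettings recordSettings = {};
    recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT; // Survives crashes, costs performance.
    recordSettings.pFilePath = "vma_capture.csv";
    allocatorInfo.pRecordSettings = &recordSettings;

When the header is compiled with VMA_RECORDING_ENABLED == 0, setting
pRecordSettings makes allocator initialization fail with
VK_ERROR_FEATURE_NOT_PRESENT instead (see VmaAllocator_T::Init below).
*/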
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
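// VmaAllocation_T objects are small and numerous, so they come from a pooled
// allocator (1024 objects per memory block, as configured above) rather than
// individual CPU heap allocations; the mutex makes the pool usable from
// multiple threads.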
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // These extensions are promoted to core Vulkan 1.1; no need to enable them explicitly.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Margin must be a multiple of sizeof(uint32_t), because the corruption-detection magic value is written as uint32_t.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData;
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();
    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0, // minBlockCount
            SIZE_MAX, // maxBlockCount
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
    m_VulkanFunctions.vkGetPhysicalDeviceProperties =
        (PFN_vkGetPhysicalDeviceProperties)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceProperties");
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties =
        (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties");
    m_VulkanFunctions.vkAllocateMemory =
        (PFN_vkAllocateMemory)vkGetDeviceProcAddr(m_hDevice, "vkAllocateMemory");
    m_VulkanFunctions.vkFreeMemory =
        (PFN_vkFreeMemory)vkGetDeviceProcAddr(m_hDevice, "vkFreeMemory");
    m_VulkanFunctions.vkMapMemory =
        (PFN_vkMapMemory)vkGetDeviceProcAddr(m_hDevice, "vkMapMemory");
    m_VulkanFunctions.vkUnmapMemory =
        (PFN_vkUnmapMemory)vkGetDeviceProcAddr(m_hDevice, "vkUnmapMemory");
    m_VulkanFunctions.vkFlushMappedMemoryRanges =
        (PFN_vkFlushMappedMemoryRanges)vkGetDeviceProcAddr(m_hDevice, "vkFlushMappedMemoryRanges");
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges =
        (PFN_vkInvalidateMappedMemoryRanges)vkGetDeviceProcAddr(m_hDevice, "vkInvalidateMappedMemoryRanges");
    m_VulkanFunctions.vkBindBufferMemory =
        (PFN_vkBindBufferMemory)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory");
    m_VulkanFunctions.vkBindImageMemory =
        (PFN_vkBindImageMemory)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory");
    m_VulkanFunctions.vkGetBufferMemoryRequirements =
        (PFN_vkGetBufferMemoryRequirements)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements");
    m_VulkanFunctions.vkGetImageMemoryRequirements =
        (PFN_vkGetImageMemoryRequirements)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements");
    m_VulkanFunctions.vkCreateBuffer =
        (PFN_vkCreateBuffer)vkGetDeviceProcAddr(m_hDevice, "vkCreateBuffer");
    m_VulkanFunctions.vkDestroyBuffer =
        (PFN_vkDestroyBuffer)vkGetDeviceProcAddr(m_hDevice, "vkDestroyBuffer");
    m_VulkanFunctions.vkCreateImage =
        (PFN_vkCreateImage)vkGetDeviceProcAddr(m_hDevice, "vkCreateImage");
    m_VulkanFunctions.vkDestroyImage =
        (PFN_vkDestroyImage)vkGetDeviceProcAddr(m_hDevice, "vkDestroyImage");
    m_VulkanFunctions.vkCmdCopyBuffer =
        (PFN_vkCmdCopyBuffer)vkGetDeviceProcAddr(m_hDevice, "vkCmdCopyBuffer");

#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        if(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR == nullptr)
        {
            m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
                (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        }
        if(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR == nullptr)
        {
            m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
                (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        if(m_VulkanFunctions.vkBindBufferMemory2KHR == nullptr)
        {
            m_VulkanFunctions.vkBindBufferMemory2KHR =
                (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        }
        if(m_VulkanFunctions.vkBindImageMemory2KHR == nullptr)
        {
            m_VulkanFunctions.vkBindImageMemory2KHR =
                (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
        }
    }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        if(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR == nullptr)
        {
            m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
                (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
        }
    }
#endif // #if VMA_MEMORY_BUDGET
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
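/*
Because VMA_COPY_IF_NOT_NULL runs after the automatic vkGet*ProcAddr fetch,
user-provided pointers always win. A minimal sketch of overriding the table,
assuming statically linked Vulkan entry points and an `allocatorInfo` being
filled out elsewhere:

    VmaVulkanFunctions vulkanFunctions = {};
    vulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
    vulkanFunctions.vkFreeMemory = &vkFreeMemory;
    // ...set the remaining members the same way...
    allocatorInfo.pVulkanFunctions = &vulkanFunctions;
*/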
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
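// Worked example with the defaults VMA_SMALL_HEAP_MAX_SIZE = 1 GiB and
// VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE = 256 MiB: a 256 MiB heap counts as
// "small", so its preferred block size is 256 MiB / 8 = 32 MiB, while an
// 8 GiB heap gets full 256 MiB blocks. The VmaAlignUp to 32 bytes only
// matters for unusual heap sizes that are not already 32-byte multiples.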
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if requested size is more than half of preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size,
            suballocType,
            memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount,
            pAllocations);
    }

    VkResult res = blockVector->Allocate(
        m_CurrentFrameIndex.load(),
        size,
        alignment,
        finalCreateInfo,
        suballocType,
        allocationCount,
        pAllocations);
    if(res == VK_SUCCESS)
    {
        return res;
    }

    // Block allocation failed: try dedicated memory, unless forbidden.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    res = AllocateDedicatedMemory(
        size,
        suballocType,
        memTypeIndex,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
        finalCreateInfo.pUserData,
        dedicatedBuffer,
        dedicatedBufferUsage,
        dedicatedImage,
        allocationCount,
        pAllocations);
    if(res == VK_SUCCESS)
    {
        VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
        return VK_SUCCESS;
    }

    // Everything failed: return error code.
    VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size,
            suballocType,
            memTypeIndex,
            allocInfo,
            map,
            isUserDataString,
            pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the new allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();

            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
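// Budget bookkeeping stays symmetric across this path: each successfully
// created page calls m_Budget.AddAllocation here, and the failure-unwind loop
// in AllocateDedicatedMemory above calls RemoveAllocation for every page it
// rolls back, so heap usage estimates remain balanced even when only some
// pages of a multi-page request succeed.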
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }

    // Bit mask of Vulkan memory types acceptable for this allocation.
    uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
    if(res == VK_SUCCESS)
    {
        VkDeviceSize alignmentForMemType = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(memTypeIndex));

        res = AllocateMemoryOfType(
            vkMemReq.size,
            alignmentForMemType,
            requiresDedicatedAllocation || prefersDedicatedAllocation,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            createInfo,
            memTypeIndex,
            suballocType,
            allocationCount,
            pAllocations);
        // Succeeded on first try.
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Allocation from this memory type failed. Try other compatible memory types.
        for(;;)
        {
            // Remove the old memTypeIndex from the set of possibilities.
            memoryTypeBits &= ~(1u << memTypeIndex);
            // Find an alternative memTypeIndex.
            res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
            if(res != VK_SUCCESS)
            {
                // No other matching memory type index could be found: no way to satisfy this request.
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }

            alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo,
                memTypeIndex,
                suballocType,
                allocationCount,
                pAllocations);
            // Allocation from this alternative memory type succeeded.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Otherwise: allocation from this memory type failed; try the next one on the following iteration.
        }
    }
    // No suitable memory type found at all.
    return res;
}
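/*
The retry loop above is what lets one request fall back through every
compatible memory type. A sketch of the typical public entry point that
funnels into it, assuming `allocator` exists:

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 65536;
    bufInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    vmaCreateBuffer(allocator, &bufInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
*/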
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}

VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and only kept for backward compatibility: it never actually resizes.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // The budget can never exceed the heap size.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of the mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion with fresh data.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
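/*
A sketch of querying these numbers from user code, assuming `allocator`
exists. vmaGetBudget fills one VmaBudget per memory heap:

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budgets);
    if(budgets[0].usage >= budgets[0].budget * 9 / 10)
    {
        // Heap 0 is nearly full - consider releasing streaming resources.
    }

Without VK_EXT_memory_budget the values come from the conservative
80%-of-heap-size heuristic above, so treat them as estimates only.
*/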
static const uint32_t VMA_VENDOR_ID_AMD = 4098;

VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        // Atomically bump the allocation's last-use frame index to the current
        // frame, retrying on concurrent modification.
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}

bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // A stripped-down version of GetAllocationInfo: only refresh the last-use
    // frame index and report whether the allocation is still alive.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}

void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
        (void)success;
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}

void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}

VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}

void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}

void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}

VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
}
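/*
Worked example of the rounding above (values are illustrative, not from the
original sources): with nonCoherentAtomSize = 64, offset = 100, size = 200:

    memRange.offset = VmaAlignDown(100, 64) = 64
    memRange.size   = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256

The flushed range [64, 320) covers the requested [100, 300) while respecting the
atom size, and is then clamped to the allocation or block end as applicable.
*/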
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // No need to unmap here: the Vulkan spec allows freeing memory that is still mapped.

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}

uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}

uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}

#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than the heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                // Fall back to 80% of the heap size as a conservative estimate.
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10;
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET

void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}

uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_ASSERT(pCreateInfo->vulkanApiVersion == 0 ||
        (VK_VERSION_MAJOR(pCreateInfo->vulkanApiVersion) == 1 && VK_VERSION_MINOR(pCreateInfo->vulkanApiVersion) <= 2));
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}
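/*
Illustrative usage sketch (not part of the original sources): creating and
destroying the allocator. `instance`, `physicalDevice`, and `device` are assumed
to have been created by the application beforehand.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.instance = instance;
    allocatorInfo.physicalDevice = physicalDevice;
    allocatorInfo.device = device;

    VmaAllocator allocator;
    VkResult res = vmaCreateAllocator(&allocatorInfo, &allocator);
    // ... use the allocator ...
    vmaDestroyAllocator(allocator);
*/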
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}
VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
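/*
Illustrative sketch (not part of the original sources): polling the budget, e.g.
once per frame, to decide whether a large resource still fits. Heap index 0 and
`requiredBytes` are placeholders; real code should map the resource's memory type
to its heap index first.

    VmaBudget budget[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budget);
    if(budget[0].usage + requiredBytes <= budget[0].budget)
    {
        // Likely safe to allocate from heap 0 without oversubscription.
    }
*/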
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();

            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
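/*
Illustrative usage sketch (not part of the original sources): dumping the full
JSON statistics to a log.

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE -> include detailed map
    printf("%s\n", statsString);
    vmaFreeStatsString(allocator, statsString);
*/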
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this
                // memory type, plus number of notPreferredFlags that are present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        // Best possible.
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
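/*
Illustrative usage sketch (not part of the original sources): finding a memory
type for a host-visible upload buffer. UINT32_MAX as memoryTypeBits means "no
restriction from the Vulkan side".

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = UINT32_MAX;
    if(vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex) == VK_SUCCESS)
    {
        // memTypeIndex can now be used e.g. as VmaPoolCreateInfo::memoryTypeIndex.
    }
*/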
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
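/*
Illustrative sketch (not part of the original sources): choosing a memory type for
a uniform buffer before creating a custom pool, without having to fill
VkMemoryRequirements manually.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
        allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
*/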
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
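/*
Illustrative sketch (not part of the original sources): allocating memory for a
VkBuffer created directly with vkCreateBuffer (`device` and `buffer` assumed
valid), then binding it through the allocator.

    VkMemoryRequirements memReq;
    vkGetBufferMemoryRequirements(device, buffer, &memReq);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation alloc;
    VmaAllocationInfo allocInfo;
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &alloc, &allocInfo);
    if(res == VK_SUCCESS)
    {
        res = vmaBindBufferMemory(allocator, alloc, buffer);
    }
*/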
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}
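/*
Illustrative usage sketch (not part of the original sources): uploading data
through a mapped pointer. `alloc`, `srcData`, and `srcSize` are assumptions
standing in for application state. If the memory type is not HOST_COHERENT, a
flush is required afterwards (see vmaFlushAllocation below).

    void* mapped = VMA_NULL;
    if(vmaMapMemory(allocator, alloc, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)srcSize);
        vmaUnmapMemory(allocator, alloc);
        vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
    }
*/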
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // info2.flags, maxGpuAllocationsToMove, maxGpuBytesToMove, commandBuffer are deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    // Degenerate case: Nothing to defragment.
    if(pInfo->allocationCount == 0 && pInfo->poolCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(pInfo->allocationCount == 0 || pInfo->pAllocations != VMA_NULL);
    VMA_ASSERT(pInfo->poolCount == 0 || pInfo->pPools != VMA_NULL);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
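/*
Illustrative sketch (not part of the original sources) of the CPU-side
defragmentation flow; `allocCount` and `allocs` stand in for application arrays.
Incremental passes (vmaBeginDefragmentationPass/vmaEndDefragmentationPass) are
only needed when a command buffer is provided for GPU moves; pure CPU moves
complete inside Begin, and End releases the context in either case.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx;
    VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    if(res == VK_SUCCESS || res == VK_NOT_READY)
    {
        vmaDefragmentationEnd(allocator, defragCtx);
    }
*/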
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
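/*
Illustrative usage sketch (not part of the original sources): the common one-call
path for creating a buffer together with its memory.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ... use the buffer ...
    vmaDestroyBuffer(allocator, buf, alloc);
*/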
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
                #if VMA_STATS_STRING_ENABLED
                    (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
                #endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }

                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
#endif // #ifdef VMA_IMPLEMENTATION