#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
// Define this macro to declare the maximum supported Vulkan version in format AAABBBCCC.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif
#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif
#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif
#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VK_EXT_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
// Define these macros to decorate all public functions with additional code,
// before and after the returned type, e.g. to export/import them from a DLL.
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
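/*
All of the configuration macros above can be overridden by the including
project before this header is pulled in. A minimal sketch (the MSVC-specific
decoration is an example, not a requirement):

\code
// Hypothetical consumer translation unit:
#define VMA_DEDICATED_ALLOCATION 0          // pretend VK_KHR_dedicated_allocation is unavailable
#define VMA_CALL_PRE __declspec(dllexport)  // decorate every public function
#define VMA_IMPLEMENTATION
#include "vk_mem_alloc.h"
\endcode
*/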
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
/// Define this macro to 0 to disable the statistics-string functions.
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
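/*
Typical use of the two functions above (assumes an already created VmaAllocator
named "allocator"; error handling omitted). The returned string must be
released with vmaFreeStatsString():

\code
char* statsJson = VMA_NULL;
vmaBuildStatsString(allocator, &statsJson, VK_TRUE); // VK_TRUE = include detailed map
// ... write statsJson to a .json file or log ...
vmaFreeStatsString(allocator, statsJson);
\endcode
*/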
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
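/*
A minimal sketch of choosing a memory type up front, e.g. to fill
VmaPoolCreateInfo::memoryTypeIndex (hypothetical size and usage flags; assumes
a valid "allocator"):

\code
VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufCreateInfo.size = 65536;
bufCreateInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;

VmaAllocationCreateInfo allocCreateInfo = {};
allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

uint32_t memTypeIndex = 0;
VkResult res = vmaFindMemoryTypeIndexForBufferInfo(
    allocator, &bufCreateInfo, &allocCreateInfo, &memTypeIndex);
\endcode
*/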
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

/// Retrieves the name of a custom pool.
VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

/// Deprecated.
VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
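/*
The canonical pattern for the pair of functions above (assumes a valid
"allocator"; error handling omitted):

\code
VkBufferCreateInfo bufferInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
bufferInfo.size = 65536;
bufferInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

VmaAllocationCreateInfo allocInfo = {};
allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

VkBuffer buffer;
VmaAllocation allocation;
vmaCreateBuffer(allocator, &bufferInfo, &allocInfo, &buffer, &allocation, nullptr);

// ... use the buffer ...

vmaDestroyBuffer(allocator, buffer, allocation);
\endcode
*/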
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif
#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio 2015 Update 2 and later report _MSC_FULL_VER >= 190023918 but
    // leave __cplusplus at 199711L; _MSVC_LANG carries the real language level.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert>   // for assert
#include <algorithm> // for min, max
#include <mutex>
#ifndef VMA_NULL
    // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
    #define VMA_NULL nullptr
#endif
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
#include <cstdlib> // for memalign
void* aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
#include <cstdlib> // for posix_memalign
void* aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void* pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
    {
        return pointer;
    }
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr)    assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make the program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)    //VMA_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)    (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)    (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)    (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr)    _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr)    free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)    std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)    std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Every allocation will have its own memory block.
    // Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    // Set to more than 1 for debugging purposes only. Must be a power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    // Set nonzero for debugging purposes only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define this macro to 1 to automatically fill new allocations and destroyed
    // allocations with some bit pattern.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define this macro to 1, together with nonzero VMA_DEBUG_MARGIN, to write a
    // magic value to the margin before and after every allocation and validate it,
    // so that memory corruptions (out-of-bounds writes) are detected.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Set this to 1 for debugging purposes only, to enable a single mutex protecting
    // all entry calls to the library. Can be useful for debugging multithreading issues.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity.
    // Set to more than 1 for debugging purposes only. Must be a power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif
#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as a single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN, little-endian bytes 66 E6 84 7F.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Copies of some Vulkan definitions so we don't need to check for their existence just to handle a few constants.
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
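/*
A quick check of the SWAR popcount above (illustration only, not part of the
library): each step folds pair, nibble, byte, and half-word sums until the low
half word holds the total number of set bits.

\code
VMA_ASSERT(VmaCountBitsSet(0x0000000Bu) == 3);  // bits 0, 1 and 3 set
VMA_ASSERT(VmaCountBitsSet(0xFFFFFFFFu) == 32);
\endcode
*/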
// Aligns the given value up to the nearest multiple of align. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns the given value down to the nearest multiple of align. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
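/*
A short sanity check for the helpers above (illustration only). Both align
helpers use division rather than bit masking, so they work for any positive
alignment, not only powers of two:

\code
VMA_ASSERT(VmaAlignUp<uint32_t>(11, 8) == 16);   // pad 11 up to the next multiple of 8
VMA_ASSERT(VmaAlignDown<uint32_t>(11, 8) == 8);  // trim 11 down to a multiple of 8
VMA_ASSERT(VmaRoundDiv<uint32_t>(7, 2) == 4);    // rounds to nearest, not toward zero
\endcode
*/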
// Returns true if given number is a power of two.
// T must be an unsigned integer number, or a signed integer that is always nonnegative.
// For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}

// Returns the smallest power of 2 greater than or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns the largest power of 2 less than or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}

#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy overlapping pages.
Resource A must be at a lower memory offset than resource B.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
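/*
A worked example (illustration only) with pageSize = 4096, a typical
bufferImageGranularity: a resource ending at byte 3999 lives on page 0, so a
neighbor starting at offset 4096 is safe, while one starting at offset 4000
shares the page and would need granularity handling.

\code
VMA_ASSERT(!VmaBlocksOnSamePage(0, 4000, 4096, 4096)); // A ends on page 0, B starts on page 1
VMA_ASSERT( VmaBlocksOnSamePage(0, 4000, 4000, 4096)); // both touch page 0
\endcode
*/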
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity: one is a "linear" resource and
the other is an "optimal" (tiled) image, or either type is unknown.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}

static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
// Fills the structure with parameters of an example buffer to be used for transfers
// during GPU memory defragmentation.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE; // Example size.
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};

// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
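/*
A minimal sketch of the intended RAII pattern (hypothetical method; m_Mutex and
m_UseMutex mirror how these guards are used later in this file):

\code
void VmaSomeClass::ReadSomething()
{
    VmaMutexLockRead lock(m_Mutex, m_UseMutex); // shared lock, or no-op if !m_UseMutex
    // ... read shared state; UnlockRead() runs automatically on every return path ...
}
\endcode
*/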
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif

// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns the iterator to the first element that is
greater than or equal to (key), according to comparison (cmp).

Cmp should return true if the first argument is less than the second argument.

The returned value is the found element, if present in the collection, or the
place where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}

template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
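/*
A small usage sketch (illustration only): on a sorted range,
VmaBinaryFindFirstNotLess returns the lower bound / insertion point, while
VmaBinaryFindSorted returns end when the exact key is absent.

\code
struct IntLess { bool operator()(int a, int b) const { return a < b; } };
int sorted[] = { 1, 3, 3, 7 };
int* lb = VmaBinaryFindFirstNotLess(sorted, sorted + 4, 3, IntLess());
VMA_ASSERT(lb == sorted + 1); // first element >= 3
VMA_ASSERT(VmaBinaryFindSorted(sorted, sorted + 4, 5, IntLess()) == sorted + 4); // 5 not found
\endcode
*/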
// Returns true if all pointers in the array are not-null and unique.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
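/*
A short sketch of the pNext helper above (illustration only): the new struct
becomes the head of the chain and inherits whatever was already linked.

\code
VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
VkMemoryDedicatedAllocateInfoKHR dedicatedInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
VmaPnextChainPushFront(&allocInfo, &dedicatedInfo);
// allocInfo.pNext == &dedicatedInfo; dedicatedInfo.pNext == old head (null here).
\endcode
*/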
////////////////////////////////////////////////////////////////////////////////
// Memory allocation

static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}

static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)

template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}

template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}

static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
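/*
A minimal sketch of pairing this allocator with the containers below
(VMA_NULL callbacks fall back to VMA_SYSTEM_ALIGNED_MALLOC / VMA_SYSTEM_FREE):

\code
VmaStlAllocator<uint32_t> alloc(VMA_NULL); // or the user's VkAllocationCallbacks
VmaVector< uint32_t, VmaStlAllocator<uint32_t> > v(alloc);
v.push_back(42u);
// With VMA_USE_STL_VECTOR == 1 the same code works, since VmaVector is std::vector.
\endcode
*/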
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR

/* Class with interface compatible with a subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }

    // This version of the constructor is here for compatibility with pre-C++14 std::vector.
    // value is unused.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}

    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }

    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }

    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }

    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }

    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }

    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }

    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }

    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }

    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};

template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}

template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
////////////////////////////////////////////////////////////////////////////////
// class VmaPoolAllocator

/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};

template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}

template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}

template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use the first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has a free item: Create a new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}

template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}

template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Set up a singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
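/*
A minimal usage sketch (illustration only): Alloc() placement-constructs a T in
a pooled slot and Free() destroys it and threads the slot back onto the block's
free list, so no per-object heap allocation takes place.

\code
VmaPoolAllocator<uint32_t> pool(VMA_NULL, 16); // first block holds 16 items
uint32_t* a = pool.Alloc(7u);                  // arguments are forwarded to T's constructor
uint32_t* b = pool.Alloc(9u);
pool.Free(b);                                  // slot becomes the head of the free list
pool.Free(a);
\endcode
*/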
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST

template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;

    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};

// VmaRawList implementation

template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}

template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computations to return all items to m_ItemAllocator as free.
}

template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}

template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };

    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };

    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};

#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP

template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};

/* Class compatible with a subset of the interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector. */
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>

template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}

template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}

template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
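/*
A minimal sketch of the fallback VmaMap above (illustration only): it keeps a
VmaVector of pairs sorted by key and binary-searches it, so find() is O(log n)
and insert() is O(n), which is fine for the small maps it serves.

\code
VmaStlAllocator< VmaPair<uint32_t, uint32_t> > mapAlloc(VMA_NULL);
VmaMap<uint32_t, uint32_t> map(mapAlloc);
map.insert(VmaPair<uint32_t, uint32_t>(5u, 50u));
VmaPair<uint32_t, uint32_t>* it = map.find(5u);
VMA_ASSERT(it != map.end() && it->second == 50u);
\endcode
*/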
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };

struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator.

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }

    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }

    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);

    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }

    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }

    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    If hAllocation.LastUseFrameIndex + frameInUseCount < allocator.CurrentFrameIndex,
    makes it lost by setting LastUseFrameIndex = VMA_FRAME_INDEX_LOST and returns true.
    Otherwise returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);

#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when the allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are the reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData; // Not null means memory is mapped.
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif
};
/*
Represents a region of a VmaDeviceMemoryBlock that is either assigned and
returned as an allocated memory block, or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;

// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

// Parameters of a planned allocation inside a VmaDeviceMemoryBlock.
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with the proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with the proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }

    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only a single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif

    // Tries to find a place for a suballocation with the given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        // Always one of VMA_ALLOCATION_CREATE_STRATEGY_* or VMA_ALLOCATION_INTERNAL_STRATEGY_* flags.
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;

    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes the actual allocation based on the request. Request must already be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees the suballocation assigned to the given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;

protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};

#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    ////////////////////////////////////////////////////////////////////////////////
    // For defragmentation

    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than a certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if the requested suballocation with given parameters can be placed in the given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given a free suballocation, merges it with the following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases the given suballocation, making it free. Merges it with adjacent
    // free suballocations if applicable. Returns an iterator to the new free
    // suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given a free suballocation, inserts it into the sorted list m_FreeSuballocationsBySize if suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given a free suballocation, removes it from the sorted list m_FreeSuballocationsBySize if suitable.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

private:
    /*
    There are two suballocation vectors, used in a ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st, but they
        all have smaller offsets.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are the upper side of a double stack.
        They all have offsets higher than those in 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();

    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
/*
- GetSize() is the original size of the allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as a separate, unused range, not available for allocations.

A node at level 0 has size = m_UsableSize.
Each next level contains nodes with size 2 times smaller than the current level.
m_LevelCount is the maximum number of levels to use in the current object.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif

    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }

    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }

private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
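/*
A worked example of the level math above (illustration only): with
m_UsableSize = 256 MiB, each level halves the node size, so level 3 nodes are
32 MiB. A 5 MiB request lands in the deepest level whose node size is still
>= 5 MiB (8 MiB here, wasting 3 of 8 MiB to internal fragmentation), the price
of the buddy algorithm's O(log n) alloc/free.

\code
const VkDeviceSize usableSize = 256ull * 1024 * 1024;
VMA_ASSERT((usableSize >> 3) == 32ull * 1024 * 1024); // LevelToNodeSize(3)
VMA_ASSERT((usableSize >> 5) ==  8ull * 1024 * 1024); // smallest node size >= 5 MiB
\endcode
*/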
// A single VkDeviceMemory block together with metadata about its suballocations.
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    /* ... constructor elided ... */

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        /* ... */
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        /* ... */
        uint32_t algorithm);
    /* ... */

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;
    /* ... */

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    /* ... Unmap elided ... */

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);
    VkResult BindImageMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);

private:
    /* ... parent pool handle and block id elided ... */
    uint32_t m_MemoryTypeIndex;
    /* ... */
    VkDeviceMemory m_hMemory;

    /* ... mutex protecting m_hMemory, m_MapCount, m_pMappedData elided ... */
    uint32_t m_MapCount;
    void* m_pMappedData;
};
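/*
Usage sketch for the mapping reference count above (illustrative only, as the
m_MapCount member suggests): Map() increments the count and only maps the
VkDeviceMemory on the 0 -> 1 transition; Unmap() decrements and only unmaps on
1 -> 0, so nested Map/Unmap pairs on the same block are cheap:

    // void* p1; void* p2;
    // block.Map(hAllocator, 1, &p1);  // actually calls vkMapMemory
    // block.Map(hAllocator, 1, &p2);  // ref count only, p2 == p1
    // block.Unmap(hAllocator, 1);     // ref count only
    // block.Unmap(hAllocator, 1);     // actually calls vkUnmapMemory
*/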
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    /* ... size and allocation handle elided ... */
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        /* ... */
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    /* ... destructor elided ... */

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        /* ... */
        VkDeviceSize alignment,
        /* ... */
        VmaSuballocationType suballocType,
        size_t allocationCount,
        /* ... */);
    /* ... Free elided ... */

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        /* ... */
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        /* ... */);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        /* ... */);
    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        /* ... */);

    // To be used only while the mutex is locked, e.g. during defragmentation.
    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    /* ... */
    bool m_HasEmptyBlock;
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs a single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        /* ... */
        VkDeviceSize alignment,
        /* ... */
        VmaSuballocationType suballocType,
        /* ... */);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        /* ... */
        VkDeviceSize alignment,
        /* ... */
        VmaSuballocationType suballocType,
        /* ... */);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    /* ... */
    void UpdateHasEmptyBlock();
};
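/*
IncrementallySortBlocks above performs a single bubble-sort step per call -
enough to keep m_Blocks approximately sorted by sum free size without paying
for a full sort on every allocation. A minimal sketch of that idea (not the
library's exact code):

    // for(size_t i = 1; i < m_Blocks.size(); ++i)
    // {
    //     if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() >
    //        m_Blocks[i]->m_pMetadata->GetSumFreeSize())
    //     {
    //         VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
    //         return; // at most one swap per call
    //     }
    // }
*/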
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        /* ... */
        VkDeviceSize preferredBlockSize);
    /* ... destructor elided ... */

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
    /* ... */
#endif

    /* ... private members (m_Id, m_Name, ...) elided ... */
};
// Strategy interface for defragmentation of a single VmaBlockVector.
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        /* ... */) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
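/*
How a VmaDefragmentationAlgorithm subclass is driven (illustrative sequence,
assuming a block vector and frame index are already at hand; the exact flags
parameter of Defragment() is elided above):

    // VmaDefragmentationAlgorithm_Generic algo(hAllocator, pBlockVector, frameIndex, false);
    // algo.AddAll();                                     // or AddAllocation() per allocation
    // VkResult res = algo.Defragment(moves, maxBytes, maxAllocs, ...);
    // VkDeviceSize moved = algo.GetBytesMoved();         // stays within the limits above
*/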
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        /* ... */);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        /* ... */);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }
                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
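/*
FreeSpaceDatabase above caches up to MAX_COUNT free ranges found while sweeping
blocks. Register() remembers a range if it is at least
VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes; Fetch() returns a range that
fits a given alignment/size and either shrinks or invalidates the chosen entry.
Illustrative use (not from the library):

    // FreeSpaceDatabase db;
    // db.Register(blockIndex, holeOffset, holeSize);   // found a hole while sweeping
    // size_t dstBlock; VkDeviceSize dstOffset;
    // if(db.Fetch(alignment, allocSize, dstBlock, dstOffset))
    //     { ... emit a move of the allocation to (dstBlock, dstOffset) ... }
*/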
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    /* ... flags and per-block handles elided ... */
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    /* ... */
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        /* ... */
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    /* ... allocator and custom pool handles elided ... */
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    /* ... */
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};

struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        /* ... */
        uint32_t currFrameIndex,
        /* ... */);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        /* ... */
        VkBool32* pAllocationsChanged);

    /* ... */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        /* ... */);
    /* ... DefragmentPassBegin elided ... */
    VkResult DefragmentPassEnd();

private:
    /* ... */
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;

    /* ... */
    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    /* ... constructor and Init elided ... */

    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        /* ... */);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        /* ... */);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        /* ... */
        uint64_t allocationCount,
        /* ... */);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        /* ... */);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        /* ... */);
    void RecordFreeMemory(uint32_t frameIndex,
        /* ... */);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        /* ... */);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        /* ... */
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        /* ... */);
    void RecordMapMemory(uint32_t frameIndex,
        /* ... */);
    void RecordUnmapMemory(uint32_t frameIndex,
        /* ... */);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        /* ... */);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        /* ... */);
    void RecordDestroyBuffer(uint32_t frameIndex,
        /* ... */);
    void RecordDestroyImage(uint32_t frameIndex,
        /* ... */);
    void RecordTouchAllocation(uint32_t frameIndex,
        /* ... */);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        /* ... */);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        /* ... */);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        /* ... */);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        /* ... */);
    void RecordSetPoolName(uint32_t frameIndex,
        /* ... */);

private:
    /* ... */

    class UserDataString
    {
    public:
        /* ... constructor elided ... */
        const char* GetString() const { return m_Str; }
    private:
        /* ... m_Str and its backing storage elided ... */
    };

    /* ... recording flags and the FILE* m_File member elided ... */
    VMA_MUTEX m_FileMutex;
    /* ... */
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    /* ... */
};

#endif // #if VMA_RECORDING_ENABLED
/*
Thread-safe wrapper over VmaPoolAllocator free list, for allocation of
VmaAllocation_T objects.
*/
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    /* ... Free elided ... */

private:
    /* ... mutex elided ... */
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
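/*
VmaCurrentBudgetData tracks per-heap byte counters with atomics so the
allocation hot path never takes a lock. Illustrative use:

    // VmaCurrentBudgetData budget;
    // budget.AddAllocation(heapIndex, size);      // on allocation success
    // budget.RemoveAllocation(heapIndex, size);   // on free
When VMA_MEMORY_BUDGET is enabled, m_OperationsSinceBudgetFetch counts
operations since the last VK_EXT_memory_budget query, so the cached
m_VulkanUsage/m_VulkanBudget values can be refreshed after enough traffic.
*/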
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    /* ... */
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation;
    bool m_UseKhrBindMemory2;
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    /* ... */
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    /* ... */
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    /* ... constructor and Init elided ... */

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        /* ... */
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        /* ... */
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        /* ... */
        VmaSuballocationType suballocType,
        size_t allocationCount,
        /* ... */);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        /* ... */);

    VkResult ResizeAllocation(
        /* ... */
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        /* ... */);
    VkResult DefragmentationEnd(
        /* ... */);
    VkResult DefragmentationPassBegin(
        /* ... */);
    VkResult DefragmentationPassEnd(
        /* ... */);

    /* ... GetAllocationInfo, CreatePool elided ... */
    void DestroyPool(VmaPool pool);
    /* ... */

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        /* ... */
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    /* ... */

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);

    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        /* ... */);
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        /* ... */);

    /* ... Map/Unmap elided ... */

    VkResult BindBufferMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);
    VkResult BindImageMemory(
        /* ... */
        VkDeviceSize allocationLocalOffset,
        /* ... */);

    void FlushOrInvalidateAllocation(
        /* ... */
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    /* ... */
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    /* ... */

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        /* ... */
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        /* ... */
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        /* ... */);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        /* ... */
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        /* ... */
        bool isUserDataString,
        /* ... */);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        /* ... */
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        /* ... */
        bool isUserDataString,
        /* ... */
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        /* ... */);

    /* ... FreeDedicatedMemory and other helpers elided ... */

    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
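/*
Worked example for GetMemoryTypeMinAlignment above: for a memory type that is
HOST_VISIBLE but not HOST_COHERENT, with limits.nonCoherentAtomSize == 64 and
VMA_DEBUG_ALIGNMENT == 1, it returns max(1, 64) == 64, so every allocation in
that type can be flushed/invalidated on atom boundaries. For a coherent type it
returns just VMA_DEBUG_ALIGNMENT.
*/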
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    ptr->~T();
    VmaFree(hAllocator, ptr);
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
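/*
The helpers above implement manual construction/destruction over the raw
VmaMalloc/VmaFree callbacks; vma_delete_array destroys elements in reverse
order before freeing, mirroring delete[]. Illustrative pairing (assuming
placement-new construction at the call site):

    // T* arr = VmaAllocateArray<T>(hAllocator, n);
    // for(size_t i = 0; i < n; ++i) new(arr + i) T();
    // ...
    // vma_delete_array(hAllocator, arr, n);  // calls ~T() n times, then VmaFree
*/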
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char* p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
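/*
Usage sketch for VmaStringBuilder (illustrative only):

    // VmaStringBuilder sb(hAllocator);
    // sb.Add("Heap ");
    // sb.AddNumber(2u);       // decimal digits via the buffer loop above
    // sb.AddNewLine();
    // printf("%.*s", (int)sb.GetLength(), sb.GetData());
Note the internal buffer is not null-terminated; callers append '\0' themselves
when a C string is needed.
*/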
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        /* ... per-character escaping of ", \, and control characters elided;
           unsupported characters end in:
           VMA_ASSERT(0 && "Character not currently supported."); ... */
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
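/*
Illustrative use of VmaJsonWriter (not from the library): the stack of
StackItem entries is what turns the flat Write* calls into well-formed JSON;
BeginValue asserts that every odd value inside an object is a string key.

    // VmaStringBuilder sb(hAllocator);
    // VmaJsonWriter json(allocationCallbacks, sb);
    // json.BeginObject();
    // json.WriteString("Size");   // key
    // json.WriteNumber(256u);     // value
    // json.EndObject();           // -> { "Size": 256 }
*/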
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}

void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
        {
            ++mapRefCount;
        }
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}

VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}

bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                return true;
            }
        }
    }
}
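/*
MakeLost above is a lock-free retry loop: it re-reads the last-use frame index
and retries the compare-exchange until the allocation is either proven still in
use (recent frame) or atomically marked VMA_FRAME_INDEX_LOST. The same pattern
in miniature with std::atomic (illustrative, not the library's code):

    // uint32_t expected = lastUseFrameIndex.load();
    // for(;;) {
    //     if(expected + frameInUseCount >= currentFrameIndex) return false; // in use
    //     if(lastUseFrameIndex.compare_exchange_weak(expected, VMA_FRAME_INDEX_LOST))
    //         return true; // marked lost; on failure 'expected' was reloaded
    // }
*/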
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif

void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}

void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}

VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata

VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    /* ... */
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Generic

VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of the next suballocation, calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previously visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid. They should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Every free space must be at least as large as the debug margin.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // When margins are used, an allocation must be preceded by a free range.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Registered free suballocation count must match the expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}

void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    /* ... initialization of outInfo fields elided ... */

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            /* ... accumulation of used-range statistics elided ... */
        }
        else
        {
            /* ... accumulation of unused-range statistics elided ... */
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    /* ... remaining pool statistics updates elided ... */
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request:
    // Early return false.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    // Efficiently search m_FreeSuballocationsBySize.
    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
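/*
CheckCorruption relies on the VMA_DEBUG_MARGIN bytes of padding around every
allocation being pre-filled with a known magic value (a 32-bit constant in this
library; the exact name, assumed here, is VMA_CORRUPTION_DETECTION_MAGIC_VALUE).
Conceptually (illustrative, not the library's exact helpers):

    // write:    fill the margin with the magic uint32_t pattern;
    // validate: scan the margin and fail if any uint32_t differs.
Any overrun by a neighboring allocation tramples the margin and is reported
via VK_ERROR_VALIDATION_FAILED_EXT above.
*/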
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert a new free suballocation after this one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert a new free suballocation before this one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
bool VmaBlockMetadata_Generic::CheckAllocation(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    VmaSuballocationList::const_iterator suballocItem,
    bool canMakeOtherLost,
    VkDeviceSize* pOffset,
    size_t* itemsToMakeLostCount,
    VkDeviceSize* pSumFreeSize,
    VkDeviceSize* pSumItemSize) const
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(suballocItem != m_Suballocations.cend());
    VMA_ASSERT(pOffset != VMA_NULL);

    *itemsToMakeLostCount = 0;
    *pSumFreeSize = 0;
    *pSumItemSize = 0;

    if(canMakeOtherLost)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            *pSumFreeSize = suballocItem->size;
        }
        else
        {
            if(suballocItem->hAllocation->CanBecomeLost() &&
                suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
            {
                ++*itemsToMakeLostCount;
                *pSumItemSize = suballocItem->size;
            }
            else
            {
                return false;
            }
        }

        // Remaining size is too small for this request: Early return.
        if(GetSize() - suballocItem->offset < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballocItem->offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for buffer-image granularity conflicts.
        // If a conflict exists, the allocation must be pushed further away.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // If the final offset is already past this suballocation, this function
        // should be called again with another starting suballocation.
        if(*pOffset >= suballocItem->offset + suballocItem->size)
        {
            return false;
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        // Another early return check.
        if(suballocItem->offset + totalSize > GetSize())
        {
            return false;
        }

        // Advance lastSuballocItem until desired size is reached,
        // updating itemsToMakeLostCount along the way.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size)
        {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0)
            {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend())
                {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
                {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else
                {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                    {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else
                    {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check next suballocations for buffer-image granularity conflicts.
        // If a conflict exists, more allocations must be marked lost, or this one fails.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
                        {
                            ++*itemsToMakeLostCount;
                        }
                        else
                        {
                            return false;
                        }
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }
    else
    {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // Size of this suballocation is too small for this request: Early return.
        if(suballoc.size < allocSize)
        {
            return false;
        }

        // Start from offset equal to beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0)
        {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for buffer-image granularity conflicts.
        if(bufferImageGranularity > 1)
        {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin())
            {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
                    {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else
                {
                    // Already on previous page.
                    break;
                }
            }
            if(bufferImageGranularityConflict)
            {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Calculate padding at the beginning based on current offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Calculate required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if requested size plus margins is bigger than this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
        {
            return false;
        }

        // Check next suballocations for buffer-image granularity conflicts.
        if(bufferImageGranularity > 1)
        {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend())
            {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
                {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
                    {
                        return false;
                    }
                }
                else
                {
                    // Already on next page.
                    break;
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: Success. *pOffset is already filled.
    return true;
}
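/*
The granularity checks above implement the Vulkan bufferImageGranularity rule:
linear and non-linear resources that land on the same "page" of a block must be
kept apart. VmaBlocksOnSamePage conceptually tests whether the end of one range
and the start of the next fall on the same granularity-aligned page, roughly
(illustrative):

    // bool onSamePage = ((endA - 1) & ~(granularity - 1)) ==
    //                   (offsetB & ~(granularity - 1));
*/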
9272 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
9274 VMA_ASSERT(item != m_Suballocations.end());
9275 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9277 VmaSuballocationList::iterator nextItem = item;
9279 VMA_ASSERT(nextItem != m_Suballocations.end());
9280 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
9282 item->size += nextItem->size;
9284 m_Suballocations.erase(nextItem);
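// Marks the given suballocation as free, returns its bytes to m_SumFreeSize,
// and coalesces it with free neighbors on either side via MergeFreeWithNext().
// Returns an iterator to the resulting (possibly merged) free item.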
9287 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
9290 VmaSuballocation& suballoc = *suballocItem;
9291 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9292 suballoc.hAllocation = VK_NULL_HANDLE;
9296 m_SumFreeSize += suballoc.size;
9299 bool mergeWithNext = false;
9300 bool mergeWithPrev = false;
9302 VmaSuballocationList::iterator nextItem = suballocItem;
9304 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
9306 mergeWithNext = true;
9309 VmaSuballocationList::iterator prevItem = suballocItem;
9310 if(suballocItem != m_Suballocations.begin())
9313 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9315 mergeWithPrev = true;
9321 UnregisterFreeSuballocation(nextItem);
9322 MergeFreeWithNext(suballocItem);
9327 UnregisterFreeSuballocation(prevItem);
9328 MergeFreeWithNext(prevItem);
9329 RegisterFreeSuballocation(prevItem);
9334 RegisterFreeSuballocation(suballocItem);
9335 return suballocItem;
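// The following pair maintains m_FreeSuballocationsBySize, a vector of
// iterators into the suballocation list kept sorted by size. Only free ranges
// of at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes are tracked,
// which keeps best-fit searches cheap without indexing tiny fragments.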
9339 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
9341 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9342 VMA_ASSERT(item->size > 0);
9346 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9348 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9350 if(m_FreeSuballocationsBySize.empty())
9352 m_FreeSuballocationsBySize.push_back(item);
9356 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
9364 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
9366 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
9367 VMA_ASSERT(item->size > 0);
9371 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
9373 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
9375 VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
9376 m_FreeSuballocationsBySize.data(),
9377 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
9379 VmaSuballocationItemSizeLess());
9380 for(size_t index = it - m_FreeSuballocationsBySize.data();
9381 index < m_FreeSuballocationsBySize.size();
9384 if(m_FreeSuballocationsBySize[index] == item)
9386 VmaVectorRemove(m_FreeSuballocationsBySize, index);
9389 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
9391 VMA_ASSERT(0 && "Not found.");
9397 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
9398 VkDeviceSize bufferImageGranularity,
9399 VmaSuballocationType& inOutPrevSuballocType) const
9401 if(bufferImageGranularity == 1 || IsEmpty())
9406 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
9407 bool typeConflictFound = false;
9408 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
9409 it != m_Suballocations.cend();
9412 const VmaSuballocationType suballocType = it->type;
9413 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
9415 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
9416 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
9418 typeConflictFound = true;
9420 inOutPrevSuballocType = suballocType;
9424 return typeConflictFound || minAlignment >= bufferImageGranularity;
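// VmaBlockMetadata_Linear manages a block as two suballocation vectors.
// Depending on how the user allocates and frees, the second vector is either
// unused (pure stack), grows downward from the end of the block toward the
// first vector (double stack, via VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT),
// or chases the first vector's tail (ring buffer). Null-item counters track
// freed slots that are pruned lazily in CleanupAfterFree().
//
// Illustrative sketch (hedged; assumes an existing `allocator` and a valid
// `memTypeIndex`) of creating a custom pool that uses this metadata class:
//
//   VmaPoolCreateInfo poolInfo = {};
//   poolInfo.memoryTypeIndex = memTypeIndex;
//   poolInfo.flags = VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT;
//   VmaPool pool;
//   VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);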
9430 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
9431 VmaBlockMetadata(hAllocator),
9433 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9434 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
9435 m_1stVectorIndex(0),
9436 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
9437 m_1stNullItemsBeginCount(0),
9438 m_1stNullItemsMiddleCount(0),
9439 m_2ndNullItemsCount(0)
9443 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
9447 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
9449 VmaBlockMetadata::Init(size);
9450 m_SumFreeSize = size;
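// Validate() walks both vectors in address order, checking that offsets are
// monotonic, that free/used state matches the hAllocation handles, and that
// the cached m_SumFreeSize agrees with a freshly computed sum.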
9453 bool VmaBlockMetadata_Linear::Validate() const
9455 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9456 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9458 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
9459 VMA_VALIDATE(!suballocations1st.empty() ||
9460 suballocations2nd.empty() ||
9461 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
9463 if(!suballocations1st.empty())
9466 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
9468 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
9470 if(!suballocations2nd.empty())
9473 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
9476 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
9477 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
9479 VkDeviceSize sumUsedSize = 0;
9480 const size_t suballoc1stCount = suballocations1st.size();
9481 VkDeviceSize offset = VMA_DEBUG_MARGIN;
9483 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9485 const size_t suballoc2ndCount = suballocations2nd.size();
9486 size_t nullItem2ndCount = 0;
9487 for(size_t i = 0; i < suballoc2ndCount; ++i)
9489 const VmaSuballocation& suballoc = suballocations2nd[i];
9490 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9492 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9493 VMA_VALIDATE(suballoc.offset >= offset);
9497 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9498 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9499 sumUsedSize += suballoc.size;
9506 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9509 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9512 for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
9514 const VmaSuballocation& suballoc = suballocations1st[i];
9515 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
9516 suballoc.hAllocation == VK_NULL_HANDLE);
9519 size_t nullItem1stCount = m_1stNullItemsBeginCount;
9521 for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
9523 const VmaSuballocation& suballoc = suballocations1st[i];
9524 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9526 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9527 VMA_VALIDATE(suballoc.offset >= offset);
9528 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
9532 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9533 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9534 sumUsedSize += suballoc.size;
9541 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9543 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
9545 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9547 const size_t suballoc2ndCount = suballocations2nd.size();
9548 size_t nullItem2ndCount = 0;
9549 for(size_t i = suballoc2ndCount; i--; )
9551 const VmaSuballocation& suballoc = suballocations2nd[i];
9552 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
9554 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
9555 VMA_VALIDATE(suballoc.offset >= offset);
9559 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
9560 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
9561 sumUsedSize += suballoc.size;
9568 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
9571 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
9574 VMA_VALIDATE(offset <= GetSize());
9575 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
9580 size_t VmaBlockMetadata_Linear::GetAllocationCount() const
9582 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
9583 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
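// The largest contiguous free range depends on the current mode: the space
// before and after the 1st vector (stack), the gap between the 2nd vector's
// tail and the 1st vector's head (ring buffer), or the gap between the two
// stacks (double stack).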
9586 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
9588 const VkDeviceSize size = GetSize();
9600 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9602 switch(m_2ndVectorMode)
9604 case SECOND_VECTOR_EMPTY:
9610 const size_t suballocations1stCount = suballocations1st.size();
9611 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
9612 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9613 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
9615 firstSuballoc.offset,
9616 size - (lastSuballoc.offset + lastSuballoc.size));
9620 case SECOND_VECTOR_RING_BUFFER:
9625 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9626 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9627 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9628 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9632 case SECOND_VECTOR_DOUBLE_STACK:
9637 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9638 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9639 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9640 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
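// Walks the whole block in address order, visiting up to three regions: the
// ring-buffer portion of the 2nd vector, the 1st vector, and the double-stack
// portion of the 2nd vector. Null items are skipped; gaps between visited
// allocations are reported as unused ranges.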
9650 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
9652 const VkDeviceSize size = GetSize();
9653 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9654 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9655 const size_t suballoc1stCount = suballocations1st.size();
9656 const size_t suballoc2ndCount = suballocations2nd.size();
9667 VkDeviceSize lastOffset = 0;
9669 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9671 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9672 size_t nextAlloc2ndIndex = 0;
9673 while(lastOffset < freeSpace2ndTo1stEnd)
9676 while(nextAlloc2ndIndex < suballoc2ndCount &&
9677 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9679 ++nextAlloc2ndIndex;
9683 if(nextAlloc2ndIndex < suballoc2ndCount)
9685 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9688 if(lastOffset < suballoc.offset)
9691 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9705 lastOffset = suballoc.offset + suballoc.size;
9706 ++nextAlloc2ndIndex;
9712 if(lastOffset < freeSpace2ndTo1stEnd)
9714 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9722 lastOffset = freeSpace2ndTo1stEnd;
9727 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9728 const VkDeviceSize freeSpace1stTo2ndEnd =
9729 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9730 while(lastOffset < freeSpace1stTo2ndEnd)
9733 while(nextAlloc1stIndex < suballoc1stCount &&
9734 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9736 ++nextAlloc1stIndex;
9740 if(nextAlloc1stIndex < suballoc1stCount)
9742 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9745 if(lastOffset < suballoc.offset)
9748 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9762 lastOffset = suballoc.offset + suballoc.size;
9763 ++nextAlloc1stIndex;
9769 if(lastOffset < freeSpace1stTo2ndEnd)
9771 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9779 lastOffset = freeSpace1stTo2ndEnd;
9783 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9785 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9786 while(lastOffset < size)
9789 while(nextAlloc2ndIndex != SIZE_MAX &&
9790 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9792 --nextAlloc2ndIndex;
9796 if(nextAlloc2ndIndex != SIZE_MAX)
9798 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9801 if(lastOffset < suballoc.offset)
9804 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9818 lastOffset = suballoc.offset + suballoc.size;
9819 --nextAlloc2ndIndex;
9825 if(lastOffset < size)
9827 const VkDeviceSize unusedRangeSize = size - lastOffset;
9843 void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
9845 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9846 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9847 const VkDeviceSize size = GetSize();
9848 const size_t suballoc1stCount = suballocations1st.size();
9849 const size_t suballoc2ndCount = suballocations2nd.size();
9851 inoutStats.size += size;
9853 VkDeviceSize lastOffset = 0;
9855 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9857 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9858 size_t nextAlloc2ndIndex = 0; // Was m_1stNullItemsBeginCount, which indexes the 1st vector; this scan of suballocations2nd must start at 0, as in CalcAllocationStatInfo and PrintDetailedMap.
9859 while(lastOffset < freeSpace2ndTo1stEnd)
9862 while(nextAlloc2ndIndex < suballoc2ndCount &&
9863 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9865 ++nextAlloc2ndIndex;
9869 if(nextAlloc2ndIndex < suballoc2ndCount)
9871 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9874 if(lastOffset < suballoc.offset)
9877 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9888 lastOffset = suballoc.offset + suballoc.size;
9889 ++nextAlloc2ndIndex;
9894 if(lastOffset < freeSpace2ndTo1stEnd)
9897 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9904 lastOffset = freeSpace2ndTo1stEnd;
9909 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9910 const VkDeviceSize freeSpace1stTo2ndEnd =
9911 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9912 while(lastOffset < freeSpace1stTo2ndEnd)
9915 while(nextAlloc1stIndex < suballoc1stCount &&
9916 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9918 ++nextAlloc1stIndex;
9922 if(nextAlloc1stIndex < suballoc1stCount)
9924 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9927 if(lastOffset < suballoc.offset)
9930 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9941 lastOffset = suballoc.offset + suballoc.size;
9942 ++nextAlloc1stIndex;
9947 if(lastOffset < freeSpace1stTo2ndEnd)
9950 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9957 lastOffset = freeSpace1stTo2ndEnd;
9961 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9963 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9964 while(lastOffset < size)
9967 while(nextAlloc2ndIndex != SIZE_MAX &&
9968 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9970 --nextAlloc2ndIndex;
9974 if(nextAlloc2ndIndex != SIZE_MAX)
9976 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9979 if(lastOffset < suballoc.offset)
9982 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9993 lastOffset = suballoc.offset + suballoc.size;
9994 --nextAlloc2ndIndex;
9999 if(lastOffset < size)
10002 const VkDeviceSize unusedRangeSize = size - lastOffset;
10015 #if VMA_STATS_STRING_ENABLED
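// JSON dump of the block layout. The first pass below only counts allocations
// and unused ranges (PrintDetailedMap_Begin needs the totals up front); the
// second pass re-walks the same three regions and emits the actual entries.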
10016 void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
10018 const VkDeviceSize size = GetSize();
10019 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10020 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10021 const size_t suballoc1stCount = suballocations1st.size();
10022 const size_t suballoc2ndCount = suballocations2nd.size();
10026 size_t unusedRangeCount = 0;
10027 VkDeviceSize usedBytes = 0;
10029 VkDeviceSize lastOffset = 0;
10031 size_t alloc2ndCount = 0;
10032 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10034 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10035 size_t nextAlloc2ndIndex = 0;
10036 while(lastOffset < freeSpace2ndTo1stEnd)
10039 while(nextAlloc2ndIndex < suballoc2ndCount &&
10040 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10042 ++nextAlloc2ndIndex;
10046 if(nextAlloc2ndIndex < suballoc2ndCount)
10048 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10051 if(lastOffset < suballoc.offset)
10054 ++unusedRangeCount;
10060 usedBytes += suballoc.size;
10063 lastOffset = suballoc.offset + suballoc.size;
10064 ++nextAlloc2ndIndex;
10069 if(lastOffset < freeSpace2ndTo1stEnd)
10072 ++unusedRangeCount;
10076 lastOffset = freeSpace2ndTo1stEnd;
10081 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
10082 size_t alloc1stCount = 0;
10083 const VkDeviceSize freeSpace1stTo2ndEnd =
10084 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
10085 while(lastOffset < freeSpace1stTo2ndEnd)
10088 while(nextAlloc1stIndex < suballoc1stCount &&
10089 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10091 ++nextAlloc1stIndex;
10095 if(nextAlloc1stIndex < suballoc1stCount)
10097 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10100 if(lastOffset < suballoc.offset)
10103 ++unusedRangeCount;
10109 usedBytes += suballoc.size;
10112 lastOffset = suballoc.offset + suballoc.size;
10113 ++nextAlloc1stIndex;
10118 if(lastOffset < freeSpace1stTo2ndEnd) // Was 'lastOffset < size'; the counting pass must use the same bound as the emitting pass below, or the counts passed to PrintDetailedMap_Begin can disagree with what gets printed.
10121 ++unusedRangeCount;
10125 lastOffset = freeSpace1stTo2ndEnd;
10129 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10131 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10132 while(lastOffset < size)
10135 while(nextAlloc2ndIndex != SIZE_MAX &&
10136 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10138 --nextAlloc2ndIndex;
10142 if(nextAlloc2ndIndex != SIZE_MAX)
10144 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10147 if(lastOffset < suballoc.offset)
10150 ++unusedRangeCount;
10156 usedBytes += suballoc.size;
10159 lastOffset = suballoc.offset + suballoc.size;
10160 --nextAlloc2ndIndex;
10165 if(lastOffset < size)
10168 ++unusedRangeCount;
10177 const VkDeviceSize unusedBytes = size - usedBytes;
10178 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
10183 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10185 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
10186 size_t nextAlloc2ndIndex = 0;
10187 while(lastOffset < freeSpace2ndTo1stEnd)
10190 while(nextAlloc2ndIndex < suballoc2ndCount &&
10191 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10193 ++nextAlloc2ndIndex;
10197 if(nextAlloc2ndIndex < suballoc2ndCount)
10199 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10202 if(lastOffset < suballoc.offset)
10205 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10206 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10211 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10214 lastOffset = suballoc.offset + suballoc.size;
10215 ++nextAlloc2ndIndex;
10220 if(lastOffset < freeSpace2ndTo1stEnd)
10223 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
10224 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10228 lastOffset = freeSpace2ndTo1stEnd;
10233 nextAlloc1stIndex = m_1stNullItemsBeginCount;
10234 while(lastOffset < freeSpace1stTo2ndEnd)
10237 while(nextAlloc1stIndex < suballoc1stCount &&
10238 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
10240 ++nextAlloc1stIndex;
10244 if(nextAlloc1stIndex < suballoc1stCount)
10246 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
10249 if(lastOffset < suballoc.offset)
10252 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10253 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10258 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10261 lastOffset = suballoc.offset + suballoc.size;
10262 ++nextAlloc1stIndex;
10267 if(lastOffset < freeSpace1stTo2ndEnd)
10270 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
10271 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10275 lastOffset = freeSpace1stTo2ndEnd;
10279 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10281 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
10282 while(lastOffset < size)
10285 while(nextAlloc2ndIndex != SIZE_MAX &&
10286 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
10288 --nextAlloc2ndIndex;
10292 if(nextAlloc2ndIndex != SIZE_MAX)
10294 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
10297 if(lastOffset < suballoc.offset)
10300 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
10301 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10306 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
10309 lastOffset = suballoc.offset + suballoc.size;
10310 --nextAlloc2ndIndex;
10315 if(lastOffset < size)
10318 const VkDeviceSize unusedRangeSize = size - lastOffset;
10319 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
10328 PrintDetailedMap_End(json);
10330 #endif // #if VMA_STATS_STRING_ENABLED
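// Entry point for allocation within a linear block: dispatches to the
// upper-address path (top of the second stack) or the lower-address path
// (end of the 1st vector, or the ring-buffer tail) based on `upperAddress`.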
10332 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
10333 uint32_t currentFrameIndex,
10334 uint32_t frameInUseCount,
10335 VkDeviceSize bufferImageGranularity,
10336 VkDeviceSize allocSize,
10337 VkDeviceSize allocAlignment,
10339 VmaSuballocationType allocType,
10340 bool canMakeOtherLost,
10342 VmaAllocationRequest* pAllocationRequest)
10344 VMA_ASSERT(allocSize > 0);
10345 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
10346 VMA_ASSERT(pAllocationRequest != VMA_NULL);
10347 VMA_HEAVY_ASSERT(Validate());
10348 return upperAddress ?
10349 CreateAllocationRequest_UpperAddress(
10350 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10351 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
10352 CreateAllocationRequest_LowerAddress(
10353 currentFrameIndex, frameInUseCount, bufferImageGranularity,
10354 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
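// Upper-address allocations grow downward from the end of the block (or from
// below the previous top allocation), so alignment and debug margins are
// applied with VmaAlignDown, and the granularity checks look at the 2nd
// vector above and the 1st vector below the candidate range.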
10357 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
10358 uint32_t currentFrameIndex,
10359 uint32_t frameInUseCount,
10360 VkDeviceSize bufferImageGranularity,
10361 VkDeviceSize allocSize,
10362 VkDeviceSize allocAlignment,
10363 VmaSuballocationType allocType,
10364 bool canMakeOtherLost,
10366 VmaAllocationRequest* pAllocationRequest)
10368 const VkDeviceSize size = GetSize();
10369 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10370 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10372 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10374 VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
10379 if(allocSize > size)
10383 VkDeviceSize resultBaseOffset = size - allocSize;
10384 if(!suballocations2nd.empty())
10386 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10387 resultBaseOffset = lastSuballoc.offset - allocSize;
10388 if(allocSize > lastSuballoc.offset)
10395 VkDeviceSize resultOffset = resultBaseOffset;
10398 if(VMA_DEBUG_MARGIN > 0)
10400 if(resultOffset < VMA_DEBUG_MARGIN)
10404 resultOffset -= VMA_DEBUG_MARGIN;
10408 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
10412 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10414 bool bufferImageGranularityConflict = false;
10415 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10417 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10418 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10420 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
10422 bufferImageGranularityConflict = true;
10430 if(bufferImageGranularityConflict)
10432 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
10437 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
10438 suballocations1st.back().offset + suballocations1st.back().size :
10440 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
10444 if(bufferImageGranularity > 1)
10446 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10448 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10449 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10451 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
10465 pAllocationRequest->offset = resultOffset;
10466 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
10467 pAllocationRequest->sumItemSize = 0;
10469 pAllocationRequest->itemsToMakeLostCount = 0;
10470 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
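// Lower-address allocations try the end of the 1st vector first; if the block
// is already (or becomes) a ring buffer, they instead grow the 2nd vector
// behind the 1st vector's head, optionally making lost allocations to clear
// the way when canMakeOtherLost is set.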
10477 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
10478 uint32_t currentFrameIndex,
10479 uint32_t frameInUseCount,
10480 VkDeviceSize bufferImageGranularity,
10481 VkDeviceSize allocSize,
10482 VkDeviceSize allocAlignment,
10483 VmaSuballocationType allocType,
10484 bool canMakeOtherLost,
10486 VmaAllocationRequest* pAllocationRequest)
10488 const VkDeviceSize size = GetSize();
10489 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10490 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10492 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10496 VkDeviceSize resultBaseOffset = 0;
10497 if(!suballocations1st.empty())
10499 const VmaSuballocation& lastSuballoc = suballocations1st.back();
10500 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10504 VkDeviceSize resultOffset = resultBaseOffset;
10507 if(VMA_DEBUG_MARGIN > 0)
10509 resultOffset += VMA_DEBUG_MARGIN;
10513 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10517 if(bufferImageGranularity > 1 && !suballocations1st.empty())
10519 bool bufferImageGranularityConflict = false;
10520 for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
10522 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
10523 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10525 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10527 bufferImageGranularityConflict = true;
10535 if(bufferImageGranularityConflict)
10537 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10541 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
10542 suballocations2nd.back().offset : size;
10545 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
10549 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10551 for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
10553 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
10554 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10556 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10570 pAllocationRequest->offset = resultOffset;
10571 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
10572 pAllocationRequest->sumItemSize = 0;
10574 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
10575 pAllocationRequest->itemsToMakeLostCount = 0;
10582 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10584 VMA_ASSERT(!suballocations1st.empty());
10586 VkDeviceSize resultBaseOffset = 0;
10587 if(!suballocations2nd.empty())
10589 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
10590 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
10594 VkDeviceSize resultOffset = resultBaseOffset;
10597 if(VMA_DEBUG_MARGIN > 0)
10599 resultOffset += VMA_DEBUG_MARGIN;
10603 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
10607 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
10609 bool bufferImageGranularityConflict = false;
10610 for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
10612 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
10613 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
10615 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
10617 bufferImageGranularityConflict = true;
10625 if(bufferImageGranularityConflict)
10627 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10631 pAllocationRequest->itemsToMakeLostCount = 0;
10632 pAllocationRequest->sumItemSize = 0;
10633 size_t index1st = m_1stNullItemsBeginCount;
10635 if(canMakeOtherLost)
10637 while(index1st < suballocations1st.size() &&
10638 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10641 const VmaSuballocation& suballoc = suballocations1st[index1st];
10642 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10648 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10649 if(suballoc.hAllocation->CanBecomeLost() &&
10650 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10652 ++pAllocationRequest->itemsToMakeLostCount;
10653 pAllocationRequest->sumItemSize += suballoc.size;
10665 if(bufferImageGranularity > 1)
10667 while(index1st < suballocations1st.size())
10669 const VmaSuballocation& suballoc = suballocations1st[index1st];
10670 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10672 if(suballoc.hAllocation != VK_NULL_HANDLE)
10675 if(suballoc.hAllocation->CanBecomeLost() &&
10676 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10678 ++pAllocationRequest->itemsToMakeLostCount;
10679 pAllocationRequest->sumItemSize += suballoc.size;
10697 if(index1st == suballocations1st.size() &&
10698 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10701 VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
10706 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10707 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10711 if(bufferImageGranularity > 1)
10713 for(size_t nextSuballocIndex = index1st;
10714 nextSuballocIndex < suballocations1st.size();
10715 nextSuballocIndex++)
10717 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10718 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10720 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10734 pAllocationRequest->offset = resultOffset;
10735 pAllocationRequest->sumFreeSize =
10736 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10738 - pAllocationRequest->sumItemSize;
10739 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
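// Second half of the lost-allocation protocol: after CreateAllocationRequest
// reported itemsToMakeLostCount > 0, this walks from the 1st vector's head
// (wrapping into the 2nd vector in ring-buffer mode) and actually marks the
// conflicting allocations lost before the new allocation is placed.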
10748 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10749 uint32_t currentFrameIndex,
10750 uint32_t frameInUseCount,
10751 VmaAllocationRequest* pAllocationRequest)
10753 if(pAllocationRequest->itemsToMakeLostCount == 0)
10758 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10761 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10762 size_t index = m_1stNullItemsBeginCount;
10763 size_t madeLostCount = 0;
10764 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
10766 if(index == suballocations->size())
10770 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10772 suballocations = &AccessSuballocations2nd();
10776 VMA_ASSERT(!suballocations->empty());
10778 VmaSuballocation& suballoc = (*suballocations)[index];
10779 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10781 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10782 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10783 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10785 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10786 suballoc.hAllocation = VK_NULL_HANDLE;
10787 m_SumFreeSize += suballoc.size;
10788 if(suballocations == &AccessSuballocations1st())
10790 ++m_1stNullItemsMiddleCount;
10794 ++m_2ndNullItemsCount;
10806 CleanupAfterFree();
10812 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10814 uint32_t lostAllocationCount = 0;
10816 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10817 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10819 VmaSuballocation& suballoc = suballocations1st[i];
10820 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10821 suballoc.hAllocation->CanBecomeLost() &&
10822 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10824 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10825 suballoc.hAllocation = VK_NULL_HANDLE;
10826 ++m_1stNullItemsMiddleCount;
10827 m_SumFreeSize += suballoc.size;
10828 ++lostAllocationCount;
10832 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10833 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10835 VmaSuballocation& suballoc = suballocations2nd[i];
10836 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10837 suballoc.hAllocation->CanBecomeLost() &&
10838 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10840 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10841 suballoc.hAllocation = VK_NULL_HANDLE;
10842 ++m_2ndNullItemsCount;
10843 m_SumFreeSize += suballoc.size;
10844 ++lostAllocationCount;
10848 if(lostAllocationCount)
10850 CleanupAfterFree();
10853 return lostAllocationCount;
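// Scans every live allocation and validates the magic values written into the
// VMA_DEBUG_MARGIN bytes on both sides of it. Using this path requires
// compiling with margins and corruption detection enabled, e.g. this
// configuration sketch placed before including the header:
//
//   #define VMA_DEBUG_MARGIN 16
//   #define VMA_DEBUG_DETECT_CORRUPTION 1
//   #define VMA_IMPLEMENTATION
//   #include "vk_mem_alloc.h"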
10856 VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
10858 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10859 for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10861 const VmaSuballocation& suballoc = suballocations1st[i];
10862 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10864 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10866 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10867 return VK_ERROR_VALIDATION_FAILED_EXT;
10869 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10871 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10872 return VK_ERROR_VALIDATION_FAILED_EXT;
10877 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10878 for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10880 const VmaSuballocation& suballoc = suballocations2nd[i];
10881 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10883 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10885 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10886 return VK_ERROR_VALIDATION_FAILED_EXT;
10888 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10890 VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10891 return VK_ERROR_VALIDATION_FAILED_EXT;
10899 void VmaBlockMetadata_Linear::Alloc(
10900 const VmaAllocationRequest& request,
10901 VmaSuballocationType type,
10902 VkDeviceSize allocSize,
10905 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10907 switch(request.type)
10909 case VmaAllocationRequestType::UpperAddress:
10911 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10912 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10913 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10914 suballocations2nd.push_back(newSuballoc);
10915 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10918 case VmaAllocationRequestType::EndOf1st:
10920 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10922 VMA_ASSERT(suballocations1st.empty() ||
10923 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10925 VMA_ASSERT(request.offset + allocSize <= GetSize());
10927 suballocations1st.push_back(newSuballoc);
10930 case VmaAllocationRequestType::EndOf2nd:
10932 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10934 VMA_ASSERT(!suballocations1st.empty() &&
10935 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10936 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10938 switch(m_2ndVectorMode)
10940 case SECOND_VECTOR_EMPTY:
10942 VMA_ASSERT(suballocations2nd.empty());
10943 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10945 case SECOND_VECTOR_RING_BUFFER:
10947 VMA_ASSERT(!suballocations2nd.empty());
10949 case SECOND_VECTOR_DOUBLE_STACK:
10950 VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10956 suballocations2nd.push_back(newSuballoc);
10960 VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
10963 m_SumFreeSize -= newSuballoc.size;
10966 void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
10968 FreeAtOffset(allocation->GetOffset());
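// Free tries the cheap cases first: the allocation at the very start of the
// 1st vector, then the last allocation of whichever vector currently owns the
// block's tail; only the general case falls back to binary search by offset.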
10971 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10973 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10974 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10976 if(!suballocations1st.empty())
10979 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10980 if(firstSuballoc.offset == offset)
10982 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10983 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10984 m_SumFreeSize += firstSuballoc.size;
10985 ++m_1stNullItemsBeginCount;
10986 CleanupAfterFree();
10992 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10993 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10995 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10996 if(lastSuballoc.offset == offset)
10998 m_SumFreeSize += lastSuballoc.size;
10999 suballocations2nd.pop_back();
11000 CleanupAfterFree();
11005 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
11007 VmaSuballocation& lastSuballoc = suballocations1st.back();
11008 if(lastSuballoc.offset == offset)
11010 m_SumFreeSize += lastSuballoc.size;
11011 suballocations1st.pop_back();
11012 CleanupAfterFree();
11019 VmaSuballocation refSuballoc;
11020 refSuballoc.offset = offset;
11022 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
11023 suballocations1st.begin() + m_1stNullItemsBeginCount,
11024 suballocations1st.end(),
11026 VmaSuballocationOffsetLess());
11027 if(it != suballocations1st.end())
11029 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11030 it->hAllocation = VK_NULL_HANDLE;
11031 ++m_1stNullItemsMiddleCount;
11032 m_SumFreeSize += it->size;
11033 CleanupAfterFree();
11038 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
11041 VmaSuballocation refSuballoc;
11042 refSuballoc.offset = offset;
11044 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
11045 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
11046 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
11047 if(it != suballocations2nd.end())
11049 it->type = VMA_SUBALLOCATION_TYPE_FREE;
11050 it->hAllocation = VK_NULL_HANDLE;
11051 ++m_2ndNullItemsCount;
11052 m_SumFreeSize += it->size;
11053 CleanupAfterFree();
11058 VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
11061 bool VmaBlockMetadata_Linear::ShouldCompact1st() const
11063 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11064 const size_t suballocCount = AccessSuballocations1st().size();
11065 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
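// Housekeeping after any free: trims null items from the edges of both
// vectors, compacts the 1st vector when ShouldCompact1st() says null items
// dominate, and, when the 1st vector empties in ring-buffer mode, swaps the
// roles of the two vectors by flipping m_1stVectorIndex.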
11068 void VmaBlockMetadata_Linear::CleanupAfterFree()
11070 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
11071 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
11075 suballocations1st.clear();
11076 suballocations2nd.clear();
11077 m_1stNullItemsBeginCount = 0;
11078 m_1stNullItemsMiddleCount = 0;
11079 m_2ndNullItemsCount = 0;
11080 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11084 const size_t suballoc1stCount = suballocations1st.size();
11085 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
11086 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
11089 while(m_1stNullItemsBeginCount < suballoc1stCount &&
11090 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11092 ++m_1stNullItemsBeginCount;
11093 --m_1stNullItemsMiddleCount;
11097 while(m_1stNullItemsMiddleCount > 0 &&
11098 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
11100 --m_1stNullItemsMiddleCount;
11101 suballocations1st.pop_back();
11105 while(m_2ndNullItemsCount > 0 &&
11106 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
11108 --m_2ndNullItemsCount;
11109 suballocations2nd.pop_back();
11113 while(m_2ndNullItemsCount > 0 &&
11114 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
11116 --m_2ndNullItemsCount;
11117 VmaVectorRemove(suballocations2nd, 0);
11120 if(ShouldCompact1st())
11122 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
11123 size_t srcIndex = m_1stNullItemsBeginCount;
11124 for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
11126 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
11130 if(dstIndex != srcIndex)
11132 suballocations1st[dstIndex] = suballocations1st[srcIndex];
11136 suballocations1st.resize(nonNullItemCount);
11137 m_1stNullItemsBeginCount = 0;
11138 m_1stNullItemsMiddleCount = 0;
11142 if(suballocations2nd.empty())
11144 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11148 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
11150 suballocations1st.clear();
11151 m_1stNullItemsBeginCount = 0;
11153 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
11156 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
11157 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
11158 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
11159 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
11161 ++m_1stNullItemsBeginCount;
11162 --m_1stNullItemsMiddleCount;
11164 m_2ndNullItemsCount = 0;
11165 m_1stVectorIndex ^= 1;
11170 VMA_HEAVY_ASSERT(Validate());
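// VmaBlockMetadata_Buddy implements a classic buddy allocator: the usable,
// power-of-two prefix of the block (VmaPrevPow2 of its size) is a binary tree
// whose nodes are FREE, SPLIT, or ALLOCATION, with one doubly-linked free
// list per level. A pool can opt into it (hedged sketch; assumes an existing
// `allocator` and a valid `memTypeIndex`):
//
//   VmaPoolCreateInfo poolInfo = {};
//   poolInfo.memoryTypeIndex = memTypeIndex;
//   poolInfo.flags = VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT;
//   VmaPool pool;
//   VkResult res = vmaCreatePool(allocator, &poolInfo, &pool);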
11177 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
11178 VmaBlockMetadata(hAllocator),
11180 m_AllocationCount(0),
11184 memset(m_FreeList, 0, sizeof(m_FreeList));
11187 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
11189 DeleteNode(m_Root);
11192 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
11194 VmaBlockMetadata::Init(size);
11196 m_UsableSize = VmaPrevPow2(size);
11197 m_SumFreeSize = m_UsableSize;
11201 while(m_LevelCount < MAX_LEVELS &&
11202 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
11207 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
11208 rootNode->offset = 0;
11209 rootNode->type = Node::TYPE_FREE;
11210 rootNode->parent = VMA_NULL;
11211 rootNode->buddy = VMA_NULL;
11214 AddToFreeListFront(0, rootNode);
11217 bool VmaBlockMetadata_Buddy::Validate() const
11220 ValidationContext ctx;
11221 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
11223 VMA_VALIDATE(false && "ValidateNode failed.");
11225 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
11226 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
11229 for(uint32_t level = 0; level < m_LevelCount; ++level)
11231 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
11232 m_FreeList[level].front->free.prev == VMA_NULL);
11234 for(Node* node = m_FreeList[level].front;
11236 node = node->free.next)
11238 VMA_VALIDATE(node->type == Node::TYPE_FREE);
11240 if(node->free.next == VMA_NULL)
11242 VMA_VALIDATE(m_FreeList[level].back == node);
11246 VMA_VALIDATE(node->free.next->free.prev == node);
11252 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
11254 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
11260 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
11262 for(uint32_t level = 0; level < m_LevelCount; ++level)
11264 if(m_FreeList[level].front != VMA_NULL)
11266 return LevelToNodeSize(level);
11272 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
11274 const VkDeviceSize unusableSize = GetUnusableSize();
11285 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
11287 if(unusableSize > 0)
11296 void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
11298 const VkDeviceSize unusableSize = GetUnusableSize();
11300 inoutStats.size += GetSize();
11301 inoutStats.unusedSize += m_SumFreeSize + unusableSize;
11306 if(unusableSize > 0)
11313 #if VMA_STATS_STRING_ENABLED
11315 void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
11319 CalcAllocationStatInfo(stat);
11321 PrintDetailedMap_Begin(
11327 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
11329 const VkDeviceSize unusableSize = GetUnusableSize();
11330 if(unusableSize > 0)
11332 PrintDetailedMap_UnusedRange(json,
11337 PrintDetailedMap_End(json);
11340 #endif // #if VMA_STATS_STRING_ENABLED
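// The search starts at the level whose node size just fits allocSize and
// walks toward level 0 (larger nodes), taking the first suitably aligned free
// node; a larger node found here is split down to the target size later, in
// Alloc().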
11342 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
11343 uint32_t currentFrameIndex,
11344 uint32_t frameInUseCount,
11345 VkDeviceSize bufferImageGranularity,
11346 VkDeviceSize allocSize,
11347 VkDeviceSize allocAlignment,
11349 VmaSuballocationType allocType,
11350 bool canMakeOtherLost,
11352 VmaAllocationRequest* pAllocationRequest)
11354 VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
11358 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
11359 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
11360 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
11362 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
11363 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
11366 if(allocSize > m_UsableSize)
11371 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11372 for(uint32_t level = targetLevel + 1; level--; )
11374 for(Node* freeNode = m_FreeList[level].front;
11375 freeNode != VMA_NULL;
11376 freeNode = freeNode->free.next)
11378 if(freeNode->offset % allocAlignment == 0)
11380 pAllocationRequest->type = VmaAllocationRequestType::Normal;
11381 pAllocationRequest->offset = freeNode->offset;
11382 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
11383 pAllocationRequest->sumItemSize = 0;
11384 pAllocationRequest->itemsToMakeLostCount = 0;
11385 pAllocationRequest->customData = (void*)(uintptr_t)level;
11394 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
11395 uint32_t currentFrameIndex,
11396 uint32_t frameInUseCount,
11397 VmaAllocationRequest* pAllocationRequest)
11403 return pAllocationRequest->itemsToMakeLostCount == 0;
11406 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
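// Alloc() re-locates the free node chosen by CreateAllocationRequest (its
// level travels through request.customData) and, if the node is larger than
// needed, splits it repeatedly into buddy pairs until currLevel reaches
// targetLevel, pushing each new pair onto the free list of the child level.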
11415 void VmaBlockMetadata_Buddy::Alloc(
11416 const VmaAllocationRequest& request,
11417 VmaSuballocationType type,
11418 VkDeviceSize allocSize,
11421 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
11423 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
11424 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
11426 Node* currNode = m_FreeList[currLevel].front;
11427 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11428 while(currNode->offset != request.offset)
11430 currNode = currNode->free.next;
11431 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
11435 while(currLevel < targetLevel)
11439 RemoveFromFreeList(currLevel, currNode);
11441 const uint32_t childrenLevel = currLevel + 1;
11444 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
11445 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
11447 leftChild->offset = currNode->offset;
11448 leftChild->type = Node::TYPE_FREE;
11449 leftChild->parent = currNode;
11450 leftChild->buddy = rightChild;
11452 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
11453 rightChild->type = Node::TYPE_FREE;
11454 rightChild->parent = currNode;
11455 rightChild->buddy = leftChild;
11458 currNode->type = Node::TYPE_SPLIT;
11459 currNode->split.leftChild = leftChild;
11462 AddToFreeListFront(childrenLevel, rightChild);
11463 AddToFreeListFront(childrenLevel, leftChild);
11468 currNode = m_FreeList[currLevel].front;
11477 VMA_ASSERT(currLevel == targetLevel &&
11478 currNode != VMA_NULL &&
11479 currNode->type == Node::TYPE_FREE);
11480 RemoveFromFreeList(currLevel, currNode);
11483 currNode->type = Node::TYPE_ALLOCATION;
11484 currNode->allocation.alloc = hAllocation;
11486 ++m_AllocationCount;
11488 m_SumFreeSize -= allocSize;
11491 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
11493 if(node->type == Node::TYPE_SPLIT)
11495 DeleteNode(node->split.leftChild->buddy);
11496 DeleteNode(node->split.leftChild);
11499 vma_delete(GetAllocationCallbacks(), node);
11502 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
11504 VMA_VALIDATE(level < m_LevelCount);
11505 VMA_VALIDATE(curr->parent == parent);
11506 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
11507 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
11510 case Node::TYPE_FREE:
11512 ctx.calculatedSumFreeSize += levelNodeSize;
11513 ++ctx.calculatedFreeCount;
11515 case Node::TYPE_ALLOCATION:
11516 ++ctx.calculatedAllocationCount;
11517 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
11518 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
11520 case Node::TYPE_SPLIT:
11522 const uint32_t childrenLevel = level + 1;
11523 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
11524 const Node* const leftChild = curr->split.leftChild;
11525 VMA_VALIDATE(leftChild != VMA_NULL);
11526 VMA_VALIDATE(leftChild->offset == curr->offset);
11527 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
11529 VMA_VALIDATE(false && "ValidateNode for left child failed.");
11531 const Node* const rightChild = leftChild->buddy;
11532 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
11533 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
11535 VMA_VALIDATE(false && "ValidateNode for right child failed.");
11546 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
11549 uint32_t level = 0;
11550 VkDeviceSize currLevelNodeSize = m_UsableSize;
11551 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
11552 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
11555 currLevelNodeSize = nextLevelNodeSize;
11556 nextLevelNodeSize = currLevelNodeSize >> 1;
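// Freeing descends from the root by comparing the offset against each split
// node's midpoint, then walks back up merging the freed node with its buddy
// for as long as the buddy is also free, returning whole nodes to their
// level's free list.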
11561 void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
11564 Node* node = m_Root;
11565 VkDeviceSize nodeOffset = 0;
11566 uint32_t level = 0;
11567 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
11568 while(node->type == Node::TYPE_SPLIT)
11570 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
11571 if(offset < nodeOffset + nextLevelSize)
11573 node = node->split.leftChild;
11577 node = node->split.leftChild->buddy;
11578 nodeOffset += nextLevelSize;
11581 levelNodeSize = nextLevelSize;
11584 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
11585 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
11588 --m_AllocationCount;
11589 m_SumFreeSize += alloc->GetSize();
11591 node->type = Node::TYPE_FREE;
11594 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
11596 RemoveFromFreeList(level, node->buddy);
11597 Node* const parent = node->parent;
11599 vma_delete(GetAllocationCallbacks(), node->buddy);
11600 vma_delete(GetAllocationCallbacks(), node);
11601 parent->type = Node::TYPE_FREE;
11609 AddToFreeListFront(level, node);
11612 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
11616 case Node::TYPE_FREE:
11622 case Node::TYPE_ALLOCATION:
11624 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11630 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11631 if(unusedRangeSize > 0)
11640 case Node::TYPE_SPLIT:
11642 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11643 const Node* const leftChild = node->split.leftChild;
11644 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11645 const Node* const rightChild = leftChild->buddy;
11646 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
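// Free nodes of each level form an intrusive doubly-linked list threaded
// through Node::free; insertion is always at the front, and removal patches
// the level's front/back pointers as needed.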
11654 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11656 VMA_ASSERT(node->type == Node::TYPE_FREE);
11659 Node* const frontNode = m_FreeList[level].front;
11660 if(frontNode == VMA_NULL)
11662 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11663 node->free.prev = node->free.next = VMA_NULL;
11664 m_FreeList[level].front = m_FreeList[level].back = node;
11668 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11669 node->free.prev = VMA_NULL;
11670 node->free.next = frontNode;
11671 frontNode->free.prev = node;
11672 m_FreeList[level].front = node;
11676 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11678 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11681 if(node->free.prev == VMA_NULL)
11683 VMA_ASSERT(m_FreeList[level].front == node);
11684 m_FreeList[level].front = node->free.next;
11688 Node* const prevFreeNode = node->free.prev;
11689 VMA_ASSERT(prevFreeNode->free.next == node);
11690 prevFreeNode->free.next = node->free.next;
11694 if(node->free.next == VMA_NULL)
11696 VMA_ASSERT(m_FreeList[level].back == node);
11697 m_FreeList[level].back = node->free.prev;
11701 Node* const nextFreeNode = node->free.next;
11702 VMA_ASSERT(nextFreeNode->free.prev == node);
11703 nextFreeNode->free.prev = node->free.prev;
11707 #if VMA_STATS_STRING_ENABLED
11708 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
11712 case Node::TYPE_FREE:
11713 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11715 case Node::TYPE_ALLOCATION:
11717 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11718 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11719 if(allocSize < levelNodeSize)
11721 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11725 case Node::TYPE_SPLIT:
11727 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11728 const Node* const leftChild = node->split.leftChild;
11729 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11730 const Node* const rightChild = leftChild->buddy;
11731 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11738 #endif // #if VMA_STATS_STRING_ENABLED
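// VmaDeviceMemoryBlock couples one VkDeviceMemory handle with the metadata
// object (generic, linear, or buddy, chosen by the pool's algorithm flag in
// Init() below) that tracks suballocations inside it, plus a mutex-guarded,
// reference-counted persistent mapping.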
11744 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
11745 m_pMetadata(VMA_NULL),
11746 m_MemoryTypeIndex(UINT32_MAX),
11748 m_hMemory(VK_NULL_HANDLE),
11750 m_pMappedData(VMA_NULL)
11754 void VmaDeviceMemoryBlock::Init(
11757 uint32_t newMemoryTypeIndex,
11758 VkDeviceMemory newMemory,
11759 VkDeviceSize newSize,
11761 uint32_t algorithm)
11763 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11765 m_hParentPool = hParentPool;
11766 m_MemoryTypeIndex = newMemoryTypeIndex;
11768 m_hMemory = newMemory;
11773 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11776 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11782 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11784 m_pMetadata->Init(newSize);
11787 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
11791 VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");
11793 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11794 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11795 m_hMemory = VK_NULL_HANDLE;
11797 vma_delete(allocator, m_pMetadata);
11798 m_pMetadata = VMA_NULL;
11801 bool VmaDeviceMemoryBlock::Validate()
const
11803 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11804 (m_pMetadata->GetSize() != 0));
11806 return m_pMetadata->Validate();
11809 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11811 void* pData =
nullptr;
11812 VkResult res = Map(hAllocator, 1, &pData);
11813 if(res != VK_SUCCESS)
11818 res = m_pMetadata->CheckCorruption(pData);
11820 Unmap(hAllocator, 1);
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }

    VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
        hAllocator->m_hDevice, m_hMemory, 0, VK_WHOLE_SIZE, 0, &m_pMappedData);
    if(result == VK_SUCCESS)
    {
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        m_MapCount = count;
    }
    return result;
}
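/*
Usage sketch (illustrative, not part of the library): Map()/Unmap() are
reference-counted per block, so nested mappings reuse a single vkMapMemory of
the whole VkDeviceMemory:

    void* pData = VMA_NULL;
    if(pBlock->Map(hAllocator, 1, &pData) == VK_SUCCESS)
    {
        // ... read/write through pData + offset ...
        pBlock->Unmap(hAllocator, 1); // vkUnmapMemory happens only when the count drops to 0
    }
*/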
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}

VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}

VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
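/*
Layout sketch (illustrative): with VMA_DEBUG_MARGIN = M, each allocation is
surrounded by M bytes filled with a magic uint32 pattern:

    | ... | margin (M) | allocation (size) | margin (M) | ... |
            ^ magic written here            ^ and here

ValidateMagicValueAroundAllocation() re-reads both spots when the allocation is
freed; a mismatch means something wrote past the allocation's bounds.
*/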
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator, const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset, VkBuffer hBuffer, const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}

VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator, const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset, VkImage hImage, const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // Same locking rationale as in BindBufferMemory.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
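/*
Worked example (illustrative): allocationLocalOffset is relative to the
allocation, not to the VkDeviceMemory block. The effective bind offset is

    memoryOffset = hAllocation->GetOffset() + allocationLocalOffset

so an allocation placed at block offset 1 MiB, bound with local offset 256,
binds the buffer/image at device-memory offset 1048832.
*/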
    memset(&outInfo, 0, sizeof(outInfo));

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator, const VmaPoolCreateInfo& createInfo, VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Name(VMA_NULL)
{
}

VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}

#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks()))
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}

bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex, size, alignment, createInfo, suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
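/*
Note (illustrative): allocation of multiple pages is transactional - if any
AllocatePage() call fails, every page allocated so far is freed again and the
output array is zeroed, so the caller never observes a partial success.
*/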
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If linear algorithm is used, canMakeOtherLost is available only when used as ring buffer,
    // which in turn requires maxBlockCount == 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with linear allocator and within single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(
                    pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else
        {
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                        createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VkResult res = AllocateFromBlock(
                        pCurrBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                        createInfo.pUserData, suballocType, strategy, pAllocation);
                    if(res == VK_SUCCESS)
                    {
                        VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                        return VK_SUCCESS;
                    }
                }
            }
        }

        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(
                    pBlock, currentFrameIndex, size, alignment, allocFlagsCopy,
                    createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
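    /*
    Worked example (illustrative) of the block size heuristic above: with the
    default preferred block size of 256 MiB and no existing blocks, a first
    allocation of 1 MiB creates a 32 MiB block (NEW_BLOCK_SIZE_SHIFT_MAX = 3
    halvings: 256 -> 128 -> 64 -> 32). If vkAllocateMemory then fails, the same
    halving is retried downward as long as the block still fits the request.
    */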
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex, m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size, alignment, isUpperAddress, suballocType,
                        canMakeOtherLost, strategy, &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex, m_FrameInUseCount, &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment, size, m_MemoryTypeIndex, suballocType, mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations must have been touched while we are here. Next try.
            }
            else
            {
                // Could not find place in any of the blocks - break outer loop.
                break;
            }
        }
        // Maximum number of tries exceeded - a very unlikely event, usually many threads allocating at once.
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block - we don't want two, so delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: we now have one empty block - leave it as a reserve.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of a free block, deferred until this point, outside of the mutex lock, for performance.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}

void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}

void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
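/*
Note (illustrative): a single bubble-sort pass stopping at the first swap is
enough here - each Free() changes the free size of at most one block, so
m_Blocks stays nearly sorted by ascending sum of free space and converges over
a few cheap calls instead of paying for a full sort every time.
*/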
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex, m_FrameInUseCount,
        m_BufferImageGranularity,
        size, alignment, isUpperAddress, suballocType,
        false, // canMakeOtherLost
        strategy, &currRequest))
    {
        // Allocate from this block.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment, size, m_MemoryTypeIndex, suballocType, mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator, m_hParentPool, m_MemoryTypeIndex, mem,
        allocInfo.allocationSize,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
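/*
Sketch (illustrative) of the resulting pNext chain when buffer device address
is enabled - VmaPnextChainPushFront() links the flags struct in front:

    VkMemoryAllocateInfo allocInfo
        .pNext -> VkMemoryAllocateFlagsInfoKHR { .flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR }

so every block can later back buffers created with
VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT.
*/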
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memcpy(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for defragmentation.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
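/*
Worked example (illustrative) of the range alignment above: with
nonCoherentAtomSize = 256, a move of size 1000 at srcOffset 300 yields
memRange.offset = VmaAlignDown(300, 256) = 256 and
memRange.size = VmaAlignUp(1000 + (300 - 256), 256) = VmaAlignUp(1044, 256) = 1280,
clamped to the end of the block - as vkInvalidate/FlushMappedMemoryRanges require.
*/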
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind buffer for whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Buffers stay in the defrag context for later destruction.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}
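/*
Note (illustrative): in the GPU path each involved block gets one temporary
VkBuffer bound to the whole VkDeviceMemory at offset 0, so a defragmentation
move becomes a plain vkCmdCopyBuffer region whose srcOffset/dstOffset come
directly from the move. The buffers are destroyed later in DefragmentationEnd().
*/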
void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
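/*
Example (illustrative, values made up) of the JSON emitted above for a custom pool:

    {
      "MemoryTypeIndex": 7,
      "BlockSize": 268435456,
      "BlockCount": { "Min": 1, "Max": 16, "Cur": 2 },
      "Blocks": { "0": { ... }, "1": { ... } }
    }
*/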
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats)
{
    // Destroy buffers.
    for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
    {
        VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
        if(blockCtx.hBuffer)
        {
            (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
                m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
        }
    }

    if(pCtx->res >= VK_SUCCESS)
    {
        FreeEmptyBlocks(pStats);
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}

void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove& move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}

bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }

    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}

void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}

VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}

void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now, as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // This is a choice based on research.
    const uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    size_t srcBlockMinIndex = 0;
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find new place for this allocation in preceding or current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size, alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest, suballocType, size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If not processed, this allocInfo remains in pSrcBlockInfo->m_Allocations for next round.
        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else if(srcBlockIndex > 0)
        {
            --srcBlockIndex;
            srcAllocIndex = SIZE_MAX;
        }
        else
        {
            return VK_SUCCESS;
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}

VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks from most "destination" to most "source".
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // Execute defragmentation rounds (the main part).
    const uint32_t roundCount = 2;
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}

bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
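/*
Note (illustrative): MoveMakesSense() is a lexicographic "<" on
(blockIndex, offset) - a move is accepted only if it transports the allocation
to an earlier block, or to a lower offset within the same block, so every
accepted move compacts data toward the front of the block vector.
*/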
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination to most source.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(),
        [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
            return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
                m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
        });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;

    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If destination and source place overlap, skip if it would move it
                        // only by less than 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
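/*
Note (illustrative): the "fast" algorithm is a single linear sweep - it walks
suballocations in source blocks and packs them front-to-back into destination
blocks, reusing gaps it could not fill via the small FreeSpaceDatabase. It
avoids the request/round machinery of the generic algorithm, which is why it
is only eligible when all allocations are movable and VMA_DEBUG_MARGIN == 0.
*/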
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}

void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}

void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Linear scan for the first suballocation at an equal or higher offset; insert before it.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end())
    {
        if(it->offset < suballoc.offset)
        {
            ++it;
        }
        else
        {
            break;
        }
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}

VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}

void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0;
    - all allocations in this block vector are movable;
    - there is no possibility of image/buffer granularity conflict;
    - the defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}

VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with algorithm other than default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}

void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentators. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with algorithm other than default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation, just earmark how much we can move.
        // The real work happens in the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_SUCCESS;
        }
        return VK_NOT_READY;
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx, m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx, m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
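
/*
Example (a sketch, not part of the library): the incremental path above is driven
from user code through the public pass API, roughly as follows. Error handling and
the execution of the returned moves are omitted; `allocator` and `info` (with
VMA_DEFRAGMENTATION_FLAG_INCREMENTAL set in info.flags) are assumed to be prepared
by the caller.

    VmaDefragmentationContext ctx = VMA_NULL;
    vmaDefragmentationBegin(allocator, &info, VMA_NULL, &ctx); // Returns VK_NOT_READY.
    VkResult res;
    do
    {
        VmaDefragmentationPassMoveInfo moves[64];
        VmaDefragmentationPassInfo passInfo = {};
        passInfo.moveCount = 64;
        passInfo.pMoves = moves;
        vmaBeginDefragmentationPass(allocator, ctx, &passInfo);
        // Copy passInfo.moveCount regions from their old to their new locations here.
        res = vmaEndDefragmentationPass(allocator, ctx); // VK_NOT_READY: more passes needed.
    } while(res == VK_NOT_READY);
    vmaDefragmentationEnd(allocator, ctx);
*/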
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}
void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}
void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // Not a string: record the pointer value itself as text.
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}
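
// Note: timestamps are QueryPerformanceCounter ticks measured relative to
// VmaRecorder::Init() and converted to seconds, so times from different threads
// within one recording share a common clock.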
void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}
void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
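
/*
Design note: VmaAllocation_T objects are small and allocated very frequently, so
they come from a mutex-guarded pool allocator (1024 objects per memory block)
instead of individual CPU heap allocations. Allocate() forwards its arguments to
the VmaAllocation_T constructor; Free() returns the object to the pool.
*/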
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Needs to be a multiple of uint32_t size because we are going to write VMA_CORRUPTION_DETECTION_MAGIC_VALUE to it.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if(m_UseExtMemoryBudget)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}
VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
            (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2");
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
            (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2");
        m_VulkanFunctions.vkBindBufferMemory2KHR =
            (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2");
        m_VulkanFunctions.vkBindImageMemory2KHR =
            (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2");
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
            (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2");
    }
#endif
#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        if(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR == nullptr)
        {
            m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
                (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetBufferMemoryRequirements2KHR");
        }
        if(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR == nullptr)
        {
            m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
                (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice, "vkGetImageMemoryRequirements2KHR");
        }
    }
#endif
#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        if(m_VulkanFunctions.vkBindBufferMemory2KHR == nullptr)
        {
            m_VulkanFunctions.vkBindBufferMemory2KHR =
                (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindBufferMemory2KHR");
        }
        if(m_VulkanFunctions.vkBindImageMemory2KHR == nullptr)
        {
            m_VulkanFunctions.vkBindImageMemory2KHR =
                (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice, "vkBindImageMemory2KHR");
        }
    }
#endif // #if VMA_BIND_MEMORY2
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_hInstance != VK_NULL_HANDLE);
        if(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR == nullptr)
        {
            m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR =
                (PFN_vkGetPhysicalDeviceMemoryProperties2KHR)vkGetInstanceProcAddr(m_hInstance, "vkGetPhysicalDeviceMemoryProperties2KHR");
        }
    }
#endif // #if VMA_MEMORY_BUDGET
#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    if(pVulkanFunctions != VMA_NULL)
    {
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
        VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
        VMA_COPY_IF_NOT_NULL(vkFreeMemory);
        VMA_COPY_IF_NOT_NULL(vkMapMemory);
        VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
        VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
        VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
        VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
        VMA_COPY_IF_NOT_NULL(vkCreateImage);
        VMA_COPY_IF_NOT_NULL(vkDestroyImage);
        VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
        VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
        VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
        VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif
#if VMA_MEMORY_BUDGET
        VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif
    }

#undef VMA_COPY_IF_NOT_NULL
    // If any of the following asserts is hit, you must either #define VMA_STATIC_VULKAN_FUNCTIONS 1
    // or pass valid pointers as VmaAllocatorCreateInfo::pVulkanFunctions.
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
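
// Worked example: with the default VMA_SMALL_HEAP_MAX_SIZE (1 GiB) and
// VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB), a 512 MiB heap counts as "small" and
// gets 512 MiB / 8 = 64 MiB blocks, while an 8 GiB heap gets the preferred
// 256 MiB blocks. The result is rounded up to a multiple of 32 bytes.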
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If the memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    const bool withinBudget = (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0;
    const bool map = (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if the requested size is greater than half of the preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            withinBudget, map, isUserDataString, finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount, pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size, alignment, finalCreateInfo, suballocType,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block allocation failed. Try dedicated memory as a fallback.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }

        res = AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            withinBudget, map, isUserDataString, finalCreateInfo.pUserData,
            dedicatedBuffer,
            dedicatedBufferUsage,
            dedicatedImage,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        // Everything failed: return the error code.
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown.
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex, allocInfo, map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all allocations already created.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
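
// Note: allocation of multiple dedicated pages is transactional - if any page
// fails, all pages allocated so far are freed and the output array is zeroed, so
// the caller never observes a partially filled pAllocations array.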
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If the memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size, alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer,
                dedicatedBufferUsage,
                dedicatedImage,
                createInfo, memTypeIndex, suballocType,
                allocationCount, pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }

            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove the old memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size, alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer,
                        dedicatedBufferUsage,
                        dedicatedImage,
                        createInfo, memTypeIndex, suballocType,
                        allocationCount, pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                    // Otherwise: allocation from this memory type also failed - try the next one.
                }
                // No other matching memory type index could be found.
                else
                {
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
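
// Note: when no custom pool is used, memory types are tried from best to worst
// fit: vmaFindMemoryTypeIndex() picks a candidate, and on failure that type's bit
// is removed from memoryTypeBits and the search repeats until an allocation
// succeeds or no compatible memory type remains.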
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation was lost.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It is kept for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because an explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock.
            GetBudget(outBudget, firstHeap, heapCount); // Recursion.
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristics.
        }
    }
}
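
// Note: without VK_EXT_memory_budget the budget can only be estimated: usage is
// what this allocator itself has allocated (blockBytes), and budget is a heuristic
// 80% of the heap size, leaving headroom for other applications and for implicit
// driver allocations.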
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}
VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        /*
        Warning: This is a carefully designed algorithm.
        Do not modify unless you really know what you're doing :)
        */
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = VK_MAX_MEMORY_TYPES;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
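
// Note: the loops above implement a lock-free "touch": they repeatedly try to
// advance the allocation's last-use frame index to the current frame with
// compare-exchange, so concurrent callers never block and an allocation once
// observed as lost stays lost.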
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
{
    VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        if(((1u << memTypeIndex) & memoryTypeBits) != 0)
        {
            VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
            VMA_ASSERT(pBlockVector);
            VkResult localRes = pBlockVector->CheckCorruption();
            switch(localRes)
            {
            case VK_ERROR_FEATURE_NOT_PRESENT:
                break;
            case VK_SUCCESS:
                finalRes = VK_SUCCESS;
                break;
            default:
                return localRes;
            }
        }
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
            {
                VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
                switch(localRes)
                {
                case VK_ERROR_FEATURE_NOT_PRESENT:
                    break;
                case VK_SUCCESS:
                    finalRes = VK_SUCCESS;
                    break;
                default:
                    return localRes;
                }
            }
        }
    }

    return finalRes;
}
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}

VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            hAllocation->BlockAllocUnmap();
            pBlock->Unmap(this, 1);
        }
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}

VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
            res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
            break;
        }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Still within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to whole block.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
}

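/*
Worked example of the alignment math above (illustrative numbers): with
nonCoherentAtomSize = 64, offset = 100 and size = 200, the range is widened to

    memRange.offset = VmaAlignDown(100, 64) = 64
    memRange.size   = VmaAlignUp(200 + (100 - 64), 64) = VmaAlignUp(236, 64) = 256

and then clamped so it does not run past the allocation (or, for block
allocations, past the end of the containing VkDeviceMemory block), as required
by the Vulkan spec for vkFlushMappedMemoryRanges/vkInvalidateMappedMemoryRanges.
*/
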
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}

uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create buffer.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        // Query for supported memory types.
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        // Destroy buffer.
        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}

uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}

#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some bugged drivers return the budget incorrectly, e.g. 0 or much bigger than heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET

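/*
A minimal usage sketch (assumes `allocator` was created with the
VK_EXT_memory_budget extension enabled, e.g. via
VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT, and that `frameIndex` is the
application's frame counter). Calling vmaSetCurrentFrameIndex() once per frame
gives the allocator a chance to refresh the cached budget as shown above:

    vmaSetCurrentFrameIndex(allocator, frameIndex);

    VmaBudget budget[VK_MAX_MEMORY_HEAPS] = {};
    vmaGetBudget(allocator, budget);
    // budget[heapIndex].usage / .budget now reflect the driver's view,
    // corrected by the heuristics above when the driver reports nonsense.
*/
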
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}

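/*
FillAllocation() only has an effect when the implementation is compiled with
VMA_DEBUG_INITIALIZE_ALLOCATIONS enabled, e.g. (in exactly one .cpp file):

    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"

Host-visible allocations are then filled with a recognizable bit pattern on
creation and destruction, which helps catch reads of uninitialized or freed
memory.
*/
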
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}

#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();

            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }

            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);

    VMA_DEBUG_LOG("vmaCreateAllocator");

    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(
    VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}

#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            {
                json.WriteString("BlockBytes");
                json.WriteNumber(budget[heapIndex].blockBytes);
                json.WriteString("AllocationBytes");
                json.WriteNumber(budget[heapIndex].allocationBytes);
                json.WriteString("Usage");
                json.WriteNumber(budget[heapIndex].usage);
                json.WriteString("Budget");
                json.WriteNumber(budget[heapIndex].budget);
            }
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();

                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED

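/*
A minimal usage sketch -- dump the JSON produced above to a file (assumes a
valid `allocator`; the file name is arbitrary):

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map
    {
        FILE* file = fopen("vma_stats.json", "w");
        if(file != NULL)
        {
            fputs(statsString, file);
            fclose(file);
        }
    }
    vmaFreeStatsString(allocator, statsString);
*/
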
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to requiredFlags and preferredFlags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless explicitly requested.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Calculate cost as number of bits from preferredFlags not present in this
                // memory type, plus the number of its bits that are explicitly not preferred.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember memory type with lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}

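/*
A minimal usage sketch: find a host-visible, preferably device-local memory
type suitable for uploads (assumes a valid `allocator`):

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;

    uint32_t memTypeIndex = UINT32_MAX;
    VkResult res = vmaFindMemoryTypeIndex(
        allocator,
        UINT32_MAX, // memoryTypeBits: no restriction from a specific resource
        &allocCreateInfo,
        &memTypeIndex);
    // On VK_SUCCESS, memTypeIndex is the lowest-cost type per the loop above.
*/
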
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

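/*
A minimal usage sketch (assumes `memTypeIndex` was obtained e.g. from
vmaFindMemoryTypeIndex() above; block size and count are illustrative):

    VmaPoolCreateInfo poolCreateInfo = {};
    poolCreateInfo.memoryTypeIndex = memTypeIndex;
    poolCreateInfo.blockSize = 128ull * 1024 * 1024; // optional: fixed 128 MiB blocks
    poolCreateInfo.maxBlockCount = 2;                // optional: cap the pool size

    VmaPool pool = VK_NULL_HANDLE;
    VkResult res = vmaCreatePool(allocator, &poolCreateInfo, &pool);
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool);
*/
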
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

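/*
A minimal usage sketch of the create-allocate-bind split (assumes `device`,
`allocator` and a filled `bufCreateInfo` exist elsewhere; error handling omitted):

    VkBuffer buffer = VK_NULL_HANDLE;
    vkCreateBuffer(device, &bufCreateInfo, NULL, &buffer);

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation allocation = VK_NULL_HANDLE;
    vmaAllocateMemoryForBuffer(allocator, buffer, &allocCreateInfo, &allocation, VMA_NULL);
    vmaBindBufferMemory(allocator, allocation, buffer);

For most cases vmaCreateBuffer() below performs all three steps in one call.
*/
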
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}

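/*
A minimal usage sketch -- upload through a mapped pointer (assumes `allocation`
was created in HOST_VISIBLE memory and that `srcData`/`dataSize` exist; for
non-HOST_COHERENT memory, follow the write with vmaFlushAllocation() as defined
below):

    void* mappedData = VMA_NULL;
    if(vmaMapMemory(allocator, allocation, &mappedData) == VK_SUCCESS)
    {
        memcpy(mappedData, srcData, dataSize);
        vmaFlushAllocation(allocator, allocation, 0, VK_WHOLE_SIZE);
        vmaUnmapMemory(allocator, allocation);
    }

Mapping is reference-counted inside the allocator, so nested
vmaMapMemory()/vmaUnmapMemory() pairs on the same allocation are valid.
*/
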
VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}

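/*
Corruption detection requires compiling the implementation with margins and
validation magic enabled, e.g.:

    #define VMA_DEBUG_MARGIN 16
    #define VMA_DEBUG_DETECT_CORRUPTION 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"

Then, periodically:

    VkResult res = vmaCheckCorruption(allocator, UINT32_MAX); // check all memory types
    // VK_SUCCESS: no corruption found.
    // VK_ERROR_FEATURE_NOT_PRESENT: checks unavailable (flags above not set,
    //   or the memory is not HOST_VISIBLE).
*/
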
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the new one.

    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // GPU-side limits and commandBuffer deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

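/*
A minimal CPU-side usage sketch (assumes `allocations`/`allocCount` name
host-visible allocations that are safe to move, and `allocationsChanged` is a
caller-provided VkBool32 array; resources bound to moved allocations must be
recreated and rebound afterwards):

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = (uint32_t)allocCount;
    defragInfo.pAllocations = allocations;
    defragInfo.pAllocationsChanged = allocationsChanged; // optional
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationContext defragCtx = VK_NULL_HANDLE;
    vmaDefragmentationBegin(allocator, &defragInfo, VMA_NULL, &defragCtx);
    // For CPU-only defragmentation the moves happen inside Begin;
    // End destroys the context.
    vmaDefragmentationEnd(allocator, defragCtx);
*/
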
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

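/*
A minimal usage sketch (assumes a valid `allocator`; sizes and usage flags are
illustrative):

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buffer = VK_NULL_HANDLE;
    VmaAllocation allocation = VK_NULL_HANDLE;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo,
        &buffer, &allocation, VMA_NULL); // VmaAllocationInfo* is optional
    // ...
    vmaDestroyBuffer(allocator, buffer, allocation); // frees both, in the right order
*/
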
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION