#ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H
#define AMD_VULKAN_MEMORY_ALLOCATOR_H
#ifndef VMA_RECORDING_ENABLED
    #define VMA_RECORDING_ENABLED 0
#endif

#ifndef NOMINMAX
    #define NOMINMAX // For windows.h
#endif

#include <vulkan/vulkan.h>

#if VMA_RECORDING_ENABLED
    #include <windows.h>
#endif
// Define this macro to declare maximum supported Vulkan version in format AAABBBCCC,
// where AAA = major, BBB = minor, CCC = patch.
#if !defined(VMA_VULKAN_VERSION)
    #if defined(VK_VERSION_1_2)
        #define VMA_VULKAN_VERSION 1002000
    #elif defined(VK_VERSION_1_1)
        #define VMA_VULKAN_VERSION 1001000
    #else
        #define VMA_VULKAN_VERSION 1000000
    #endif
#endif
#if !defined(VMA_DEDICATED_ALLOCATION)
    #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation
        #define VMA_DEDICATED_ALLOCATION 1
    #else
        #define VMA_DEDICATED_ALLOCATION 0
    #endif
#endif

#if !defined(VMA_BIND_MEMORY2)
    #if VK_KHR_bind_memory2
        #define VMA_BIND_MEMORY2 1
    #else
        #define VMA_BIND_MEMORY2 0
    #endif
#endif

#if !defined(VMA_MEMORY_BUDGET)
    #if VK_EXT_memory_budget && (VK_KHR_get_physical_device_properties2 || VMA_VULKAN_VERSION >= 1001000)
        #define VMA_MEMORY_BUDGET 1
    #else
        #define VMA_MEMORY_BUDGET 0
    #endif
#endif

// Defined to 1 when VK_KHR_buffer_device_address device extension or equivalent core Vulkan 1.2 feature is available.
#if !defined(VMA_BUFFER_DEVICE_ADDRESS)
    #if VK_KHR_buffer_device_address || VMA_VULKAN_VERSION >= 1002000
        #define VMA_BUFFER_DEVICE_ADDRESS 1
    #else
        #define VMA_BUFFER_DEVICE_ADDRESS 0
    #endif
#endif
// Define these macros to decorate all public functions with additional code,
// before and after returned type, appropriately.
#ifndef VMA_CALL_PRE
    #define VMA_CALL_PRE
#endif
#ifndef VMA_CALL_POST
    #define VMA_CALL_POST
#endif
/// Callback function called after successful vkAllocateMemory.
typedef void (VKAPI_PTR *PFN_vmaAllocateDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
/// Callback function called before vkFreeMemory.
typedef void (VKAPI_PTR *PFN_vmaFreeDeviceMemoryFunction)(
    VmaAllocator allocator,
    uint32_t memoryType,
    VkDeviceMemory memory,
    VkDeviceSize size);
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
#endif
#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
#endif
#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR;
#endif
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);

/// Given a memory type index, returns property flags of this memory type.
VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags);

/// Sets index of the current frame.
VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex);
#ifndef VMA_STATS_STRING_ENABLED
    #define VMA_STATS_STRING_ENABLED 1
#endif

#if VMA_STATS_STRING_ENABLED

/// Builds and returns statistics as a null-terminated string in JSON format.
VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString);

#endif // #if VMA_STATS_STRING_ENABLED
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex);
VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount);

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext);
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo);
#endif // AMD_VULKAN_MEMORY_ALLOCATOR_H
// For Visual Studio IntelliSense.
#if defined(__cplusplus) && defined(__INTELLISENSE__)
#define VMA_IMPLEMENTATION
#endif

#ifdef VMA_IMPLEMENTATION
#undef VMA_IMPLEMENTATION

#include <cstdint>
#include <cstdlib>
#include <cstring>
// Define this macro to 1 to make the library use static Vulkan function
// pointers (directly from the Vulkan loader), e.g.
// vulkanFunctions.vkAllocateMemory = &vkAllocateMemory.
#if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES)
    #define VMA_STATIC_VULKAN_FUNCTIONS 1
#endif

// Define this macro to 1 to make the library fetch pointers to Vulkan functions
// internally via vkGetDeviceProcAddr.
#if !defined(VMA_DYNAMIC_VULKAN_FUNCTIONS)
    #define VMA_DYNAMIC_VULKAN_FUNCTIONS 1
#endif
// When VMA_USE_STL_CONTAINERS is defined to 1, the library uses STL containers
// (std::vector, std::unordered_map, std::list) instead of its own implementations.
#if VMA_USE_STL_CONTAINERS
    #define VMA_USE_STL_VECTOR 1
    #define VMA_USE_STL_UNORDERED_MAP 1
    #define VMA_USE_STL_LIST 1
#endif
#ifndef VMA_USE_STL_SHARED_MUTEX
    // Compiler conforms to C++17.
    #if __cplusplus >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    // Visual Studio defines __cplusplus properly only when passed additional parameter: /Zc:__cplusplus
    // Otherwise it is always 199711L, despite std::shared_mutex being available since Visual Studio 2015 Update 2.
    #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L
        #define VMA_USE_STL_SHARED_MUTEX 1
    #else
        #define VMA_USE_STL_SHARED_MUTEX 0
    #endif
#endif
#if VMA_USE_STL_VECTOR
    #include <vector>
#endif

#if VMA_USE_STL_UNORDERED_MAP
    #include <unordered_map>
#endif

#if VMA_USE_STL_LIST
    #include <list>
#endif

#include <cassert> // for assert
#include <algorithm> // for min, max
#include <mutex>
#define VMA_NULL   nullptr
#if defined(__ANDROID_API__) && (__ANDROID_API__ < 16)
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    return memalign(alignment, size);
}
#elif defined(__APPLE__) || defined(__ANDROID__) || (defined(__linux__) && defined(__GLIBCXX__) && !defined(_GLIBCXX_HAVE_ALIGNED_ALLOC))
void *aligned_alloc(size_t alignment, size_t size)
{
    // alignment must be >= sizeof(void*)
    if(alignment < sizeof(void*))
    {
        alignment = sizeof(void*);
    }

    void *pointer;
    if(posix_memalign(&pointer, alignment, size) == 0)
        return pointer;
    return VMA_NULL;
}
#endif
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef NDEBUG
        #define VMA_ASSERT(expr)
    #else
        #define VMA_ASSERT(expr)         assert(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make the program noticeably slower.
#ifndef VMA_HEAVY_ASSERT
    #ifdef NDEBUG
        #define VMA_HEAVY_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
    #endif
#endif
#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size)))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr)   free(ptr)
    #endif
#endif
#ifndef VMA_MIN
    #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
#endif
// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
        bool TryLock() { return m_Mutex.try_lock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif
// Read-write mutex, where "read" is shared access, "write" is exclusive access.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            bool TryLockRead() { return m_Mutex.try_lock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
            bool TryLockWrite() { return m_Mutex.try_lock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI.
        // Minimum supported client = Windows Vista, server = Windows Server 2008.
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            bool TryLockRead() { return TryAcquireSRWLockShared(&m_Lock) != FALSE; }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
            bool TryLockWrite() { return TryAcquireSRWLockExclusive(&m_Lock) != FALSE; }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: Use normal mutex.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            bool TryLockRead() { return m_Mutex.TryLock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
            bool TryLockWrite() { return m_Mutex.TryLock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
#ifndef VMA_ATOMIC_UINT32
    #include <atomic>
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_ATOMIC_UINT64
    #include <atomic>
    #define VMA_ATOMIC_UINT64 std::atomic<uint64_t>
#endif
#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    // Define to 1 to make every allocation use its own dedicated VkDeviceMemory (debugging only).
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    // Minimum alignment of all allocations, in bytes.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    // Minimum margin before and after every allocation, in bytes.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS
    // Define to 1 to fill allocations with a bit pattern on creation and destruction.
    #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0)
#endif

#ifndef VMA_DEBUG_DETECT_CORRUPTION
    // Define to 1, together with non-zero VMA_DEBUG_MARGIN, to write magic values
    // into the margins and validate them on free.
    #define VMA_DEBUG_DETECT_CORRUPTION (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    // Define to 1 to serialize entry calls to the library with a global mutex.
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    // Minimum value of VkPhysicalDeviceLimits::bufferImageGranularity to respect.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif
#ifndef VMA_SMALL_HEAP_MAX_SIZE
    /// Maximum size of a memory heap in Vulkan to consider it "small".
    #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
    /// Default size of a block allocated as single VkDeviceMemory from a "large" heap.
    #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif
#ifndef VMA_CLASS_NO_COPY
    #define VMA_CLASS_NO_COPY(className) \
        private: \
            className(const className&) = delete; \
            className& operator=(const className&) = delete;
#endif
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Decimal 2139416166, float NaN.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

/*
Copy of some Vulkan definitions so we don't need to check their existence just
to handle few special cases.
*/
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY = 0x00000040;
static const uint32_t VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY = 0x00000080;
static const uint32_t VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY = 0x00020000;

static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;

static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
    VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns number of bits set to 1 in (v).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
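// Illustrative values for the SWAR bit-count above (not part of the library):
//   VmaCountBitsSet(0x00000000u) == 0
//   VmaCountBitsSet(0x0000000Fu) == 4
//   VmaCountBitsSet(0xFFFFFFFFu) == 32
// Pairs of bits are summed first, then nibbles, bytes and halfwords, so the
// whole count costs a fixed number of shifts and masks with no branches.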
// Aligns given value up to nearest multiply of align value. For example: VmaAlignUp(11, 8) = 16.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Aligns given value down to nearest multiply of align value. For example: VmaAlignDown(11, 8) = 8.
// Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}

// Division with mathematical rounding to nearest number.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
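// Illustrative values for the helpers above (assuming unsigned integer T):
//   VmaAlignUp<uint32_t>(11, 8)   == 16    VmaAlignUp<uint32_t>(16, 8) == 16
//   VmaAlignDown<uint32_t>(11, 8) == 8
//   VmaRoundDiv<uint32_t>(7, 2)   == 4     (rounds to nearest)
// The formulas use plain integer division, so they hold for any positive align,
// not only powers of 2.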
// Returns true if given number is a power of two.
// T must be unsigned integer number. For 0 returns true.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v++;
    return v;
}
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1; v |= v >> 2; v |= v >> 4; v |= v >> 8; v |= v >> 16; v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
#if VMA_STATS_STRING_ENABLED

static const char* VmaAlgorithmToStr(uint32_t algorithm)
{
    switch(algorithm)
    {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        return "Linear";
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        return "Buddy";
    case 0:
        return "Default";
    default:
        VMA_ASSERT(0);
        return "";
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_SORT

template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}

#define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp)

#endif // #ifndef VMA_SORT
/*
Returns true if two memory blocks occupy the same VkDeviceMemory "page".
Assumption: resourceA must be at a lower memory offset than resourceB.
*/
static inline bool VmaBlocksOnSamePage(
    VkDeviceSize resourceAOffset,
    VkDeviceSize resourceASize,
    VkDeviceSize resourceBOffset,
    VkDeviceSize pageSize)
{
    VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
    VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
    VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
    VkDeviceSize resourceBStart = resourceBOffset;
    VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
    return resourceAEndPage == resourceBStartPage;
}
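// Example (illustrative, assuming pageSize == 4096, a power of 2 as the bit
// masks above require):
//   A: offset 0, size 4096 -> last byte 4095, page 0
//   B: offset 4096         -> page 4096            => returns false
//   A: offset 0, size 100; B: offset 200 -> both on page 0 => returns true,
// meaning bufferImageGranularity could be violated if A and B are resources of
// conflicting types.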
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
/*
Returns true if given suballocation types could conflict and must respect
VkPhysicalDeviceLimits::bufferImageGranularity when placed next to each other
on the same memory page.
*/
static inline bool VmaIsBufferImageGranularityConflict(
    VmaSuballocationType suballocType1,
    VmaSuballocationType suballocType2)
{
    if(suballocType1 > suballocType2)
    {
        VMA_SWAP(suballocType1, suballocType2);
    }

    switch(suballocType1)
    {
    case VMA_SUBALLOCATION_TYPE_FREE:
        return false;
    case VMA_SUBALLOCATION_TYPE_UNKNOWN:
        return true;
    case VMA_SUBALLOCATION_TYPE_BUFFER:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
        return
            suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
    case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
        return false;
    default:
        VMA_ASSERT(0);
        return true;
    }
}
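// Illustrative results of the conflict check above: two BUFFER suballocations
// never conflict; a BUFFER next to an IMAGE_OPTIMAL does; UNKNOWN conflicts
// with everything, because the actual resource kind is not known and the check
// must stay conservative.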
static void VmaWriteMagicValue(void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    uint32_t* pDst = (uint32_t*)((char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pDst)
    {
        *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
    }
#else
    // no-op
#endif
}
static bool VmaValidateMagicValue(const void* pData, VkDeviceSize offset)
{
#if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION
    const uint32_t* pSrc = (const uint32_t*)((const char*)pData + offset);
    const size_t numberCount = VMA_DEBUG_MARGIN / sizeof(uint32_t);
    for(size_t i = 0; i < numberCount; ++i, ++pSrc)
    {
        if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
        {
            return false;
        }
    }
#endif
    return true;
}
// Fills structure with parameters of an example buffer to be used for transfers
// during GPU memory defragmentation.
static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
{
    memset(&outBufCreateInfo, 0, sizeof(outBufCreateInfo));
    outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
}
// Helper RAII class to lock a mutex in constructor and unlock it in destructor (at the end of scope).
struct VmaMutexLock
{
    VMA_CLASS_NO_COPY(VmaMutexLock)
public:
    VmaMutexLock(VMA_MUTEX& mutex, bool useMutex = true) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->Lock(); } }
    ~VmaMutexLock()
    { if(m_pMutex) { m_pMutex->Unlock(); } }
private:
    VMA_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for reading.
struct VmaMutexLockRead
{
    VMA_CLASS_NO_COPY(VmaMutexLockRead)
public:
    VmaMutexLockRead(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockRead(); } }
    ~VmaMutexLockRead() { if(m_pMutex) { m_pMutex->UnlockRead(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
// Helper RAII class to lock a RW mutex in constructor and unlock it in destructor (at the end of scope), for writing.
struct VmaMutexLockWrite
{
    VMA_CLASS_NO_COPY(VmaMutexLockWrite)
public:
    VmaMutexLockWrite(VMA_RW_MUTEX& mutex, bool useMutex) :
        m_pMutex(useMutex ? &mutex : VMA_NULL)
    { if(m_pMutex) { m_pMutex->LockWrite(); } }
    ~VmaMutexLockWrite() { if(m_pMutex) { m_pMutex->UnlockWrite(); } }
private:
    VMA_RW_MUTEX* m_pMutex;
};
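// Illustrative usage of the RAII guards above (hypothetical function):
//
//   void ReadSharedState(VMA_RW_MUTEX& mtx, bool useMutex)
//   {
//       VmaMutexLockRead lock(mtx, useMutex); // shared lock taken if useMutex
//       // ... read state guarded by mtx ...
//   } // shared lock released when 'lock' goes out of scope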
#if VMA_DEBUG_GLOBAL_MUTEX
    static VMA_MUTEX gDebugGlobalMutex;
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true);
#else
    #define VMA_DEBUG_GLOBAL_MUTEX_LOCK
#endif
// Minimum size of a free suballocation to register it in the list of free suballocations.
static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search and returns iterator to the first element that is
greater or equal to (key), according to comparison (cmp).

Cmp should return true if the first argument is less than the second argument.

Returned value is the found element, if present in the collection, or the place
where a new element with value (key) should be inserted.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, const CmpLess& cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
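// Illustrative usage (hypothetical comparator and data, not part of the library):
//
//   struct U32Less { bool operator()(uint32_t a, uint32_t b) const { return a < b; } };
//   uint32_t arr[] = { 1, 3, 3, 7 };
//   uint32_t* it = VmaBinaryFindFirstNotLess(arr, arr + 4, 3u, U32Less());
//   // it points to arr[1] (the first 3); with key 4u it would point to arr[3]
//   // (the 7), i.e. the position where 4 would be inserted to keep order.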
template<typename CmpLess, typename IterT, typename KeyT>
IterT VmaBinaryFindSorted(const IterT& beg, const IterT& end, const KeyT& value, const CmpLess& cmp)
{
    IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
        beg, end, value, cmp);
    if(it == end ||
        (!cmp(*it, value) && !cmp(value, *it)))
    {
        return it;
    }
    return end;
}
/*
Returns true if all pointers in the array are not-null and unique.
Warning! O(n^2) complexity. Use only inside VMA_HEAVY_ASSERT.
T must be a pointer type, e.g. VmaAllocation, VmaPool.
*/
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(iPtr == arr[j])
            {
                return false;
            }
        }
    }
    return true;
}
template<typename MainT, typename NewT>
static inline void VmaPnextChainPushFront(MainT* mainStruct, NewT* newStruct)
{
    newStruct->pNext = mainStruct->pNext;
    mainStruct->pNext = newStruct;
}
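// Illustrative usage (hypothetical local structs): pushing an extension struct
// to the front of a Vulkan pNext chain while preserving whatever was chained
// before:
//
//   VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
//   VkMemoryDedicatedAllocateInfoKHR dedicatedInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
//   VmaPnextChainPushFront(&allocInfo, &dedicatedInfo);
//   // allocInfo.pNext == &dedicatedInfo; dedicatedInfo.pNext holds the old chain.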
static void* VmaMalloc(const VkAllocationCallbacks* pAllocationCallbacks, size_t size, size_t alignment)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnAllocation != VMA_NULL))
    {
        return (*pAllocationCallbacks->pfnAllocation)(
            pAllocationCallbacks->pUserData,
            size,
            alignment,
            VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
    }
    else
    {
        return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
    }
}
static void VmaFree(const VkAllocationCallbacks* pAllocationCallbacks, void* ptr)
{
    if((pAllocationCallbacks != VMA_NULL) &&
        (pAllocationCallbacks->pfnFree != VMA_NULL))
    {
        (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
    }
    else
    {
        VMA_SYSTEM_FREE(ptr);
    }
}
template<typename T>
static T* VmaAllocate(const VkAllocationCallbacks* pAllocationCallbacks)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(const VkAllocationCallbacks* pAllocationCallbacks, size_t count)
{
    return (T*)VmaMalloc(pAllocationCallbacks, sizeof(T) * count, VMA_ALIGN_OF(T));
}

#define vma_new(allocator, type)   new(VmaAllocate<type>(allocator))(type)

#define vma_new_array(allocator, type, count)   new(VmaAllocateArray<type>((allocator), (count)))(type)
template<typename T>
static void vma_delete(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
{
    ptr->~T();
    VmaFree(pAllocationCallbacks, ptr);
}
template<typename T>
static void vma_delete_array(const VkAllocationCallbacks* pAllocationCallbacks, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(pAllocationCallbacks, ptr);
    }
}
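// Illustrative usage of the helpers above (hypothetical 'callbacks' pointer,
// which may be null to fall back to VMA_SYSTEM_ALIGNED_MALLOC/VMA_SYSTEM_FREE):
//
//   struct Foo { int x = 0; };
//   Foo* p = vma_new(callbacks, Foo);             // allocate + construct
//   vma_delete(callbacks, p);                     // destruct + free
//   int* arr = vma_new_array(callbacks, int, 16); // POD array
//   vma_delete_array(callbacks, arr, 16);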
static char* VmaCreateStringCopy(const VkAllocationCallbacks* allocs, const char* srcStr)
{
    if(srcStr != VMA_NULL)
    {
        const size_t len = strlen(srcStr);
        char* const result = vma_new_array(allocs, char, len + 1);
        memcpy(result, srcStr, len + 1);
        return result;
    }
    else
    {
        return VMA_NULL;
    }
}
static void VmaFreeString(const VkAllocationCallbacks* allocs, char* str)
{
    if(str != VMA_NULL)
    {
        const size_t len = strlen(str);
        vma_delete_array(allocs, str, len + 1);
    }
}
// STL-compatible allocator.
template<typename T>
class VmaStlAllocator
{
public:
    const VkAllocationCallbacks* const m_pCallbacks;
    typedef T value_type;

    VmaStlAllocator(const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
    template<typename U> VmaStlAllocator(const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }

    T* allocate(size_t n) { return VmaAllocateArray<T>(m_pCallbacks, n); }
    void deallocate(T* p, size_t n) { VmaFree(m_pCallbacks, p); }

    template<typename U>
    bool operator==(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks == rhs.m_pCallbacks;
    }
    template<typename U>
    bool operator!=(const VmaStlAllocator<U>& rhs) const
    {
        return m_pCallbacks != rhs.m_pCallbacks;
    }

    VmaStlAllocator& operator=(const VmaStlAllocator& x) = delete;
};
#if VMA_USE_STL_VECTOR

#define VmaVector std::vector

template<typename T, typename allocatorT>
static void VmaVectorInsert(std::vector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(vec.begin() + index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}

#else // #if VMA_USE_STL_VECTOR
/* Class with interface compatible with a subset of std::vector.
T must be POD because constructors and destructors are not called and memcpy is
used for these objects. */
template<typename T, typename AllocatorT>
class VmaVector
{
public:
    typedef T value_type;

    VmaVector(const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(VMA_NULL),
        m_Count(0),
        m_Capacity(0)
    {
    }

    VmaVector(size_t count, const AllocatorT& allocator) :
        m_Allocator(allocator),
        m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
        m_Count(count),
        m_Capacity(count)
    {
    }
    // value is unused; this overload exists for compatibility with the
    // std::vector(count, value, allocator) interface.
    VmaVector(size_t count, const T& value, const AllocatorT& allocator)
        : VmaVector(count, allocator) {}
    VmaVector(const VmaVector<T, AllocatorT>& src) :
        m_Allocator(src.m_Allocator),
        m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
        m_Count(src.m_Count),
        m_Capacity(src.m_Count)
    {
        if(m_Count != 0)
        {
            memcpy(m_pArray, src.m_pArray, m_Count * sizeof(T));
        }
    }

    ~VmaVector()
    {
        VmaFree(m_Allocator.m_pCallbacks, m_pArray);
    }

    VmaVector& operator=(const VmaVector<T, AllocatorT>& rhs)
    {
        if(&rhs != this)
        {
            resize(rhs.m_Count);
            if(m_Count != 0)
            {
                memcpy(m_pArray, rhs.m_pArray, m_Count * sizeof(T));
            }
        }
        return *this;
    }
    bool empty() const { return m_Count == 0; }
    size_t size() const { return m_Count; }
    T* data() { return m_pArray; }
    const T* data() const { return m_pArray; }

    T& operator[](size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }
    const T& operator[](size_t index) const
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        return m_pArray[index];
    }

    T& front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    const T& front() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[0];
    }
    T& back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    const T& back() const
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        return m_pArray[m_Count - 1];
    }
    void reserve(size_t newCapacity, bool freeMemory = false)
    {
        newCapacity = VMA_MAX(newCapacity, m_Count);

        if((newCapacity < m_Capacity) && !freeMemory)
        {
            newCapacity = m_Capacity;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            if(m_Count != 0)
            {
                memcpy(newArray, m_pArray, m_Count * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }
    }
    void resize(size_t newCount, bool freeMemory = false)
    {
        size_t newCapacity = m_Capacity;
        if(newCount > m_Capacity)
        {
            newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (size_t)8));
        }
        else if(freeMemory)
        {
            newCapacity = newCount;
        }

        if(newCapacity != m_Capacity)
        {
            T* const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
            const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
            if(elementsToCopy != 0)
            {
                memcpy(newArray, m_pArray, elementsToCopy * sizeof(T));
            }
            VmaFree(m_Allocator.m_pCallbacks, m_pArray);
            m_Capacity = newCapacity;
            m_pArray = newArray;
        }

        m_Count = newCount;
    }
    void clear(bool freeMemory = false)
    {
        resize(0, freeMemory);
    }
    void insert(size_t index, const T& src)
    {
        VMA_HEAVY_ASSERT(index <= m_Count);
        const size_t oldCount = size();
        resize(oldCount + 1);
        if(index < oldCount)
        {
            memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) * sizeof(T));
        }
        m_pArray[index] = src;
    }
    void remove(size_t index)
    {
        VMA_HEAVY_ASSERT(index < m_Count);
        const size_t oldCount = size();
        if(index < oldCount - 1)
        {
            memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) * sizeof(T));
        }
        resize(oldCount - 1);
    }
    void push_back(const T& src)
    {
        const size_t newIndex = size();
        resize(newIndex + 1);
        m_pArray[newIndex] = src;
    }

    void pop_back()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        resize(size() - 1);
    }

    void push_front(const T& src)
    {
        insert(0, src);
    }

    void pop_front()
    {
        VMA_HEAVY_ASSERT(m_Count > 0);
        remove(0);
    }
    typedef T* iterator;

    iterator begin() { return m_pArray; }
    iterator end() { return m_pArray + m_Count; }

private:
    AllocatorT m_Allocator;
    T* m_pArray;
    size_t m_Count;
    size_t m_Capacity;
};
template<typename T, typename allocatorT>
static void VmaVectorInsert(VmaVector<T, allocatorT>& vec, size_t index, const T& item)
{
    vec.insert(index, item);
}

template<typename T, typename allocatorT>
static void VmaVectorRemove(VmaVector<T, allocatorT>& vec, size_t index)
{
    vec.remove(index);
}

#endif // #if VMA_USE_STL_VECTOR
template<typename CmpLess, typename VectorT>
size_t VmaVectorInsertSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        CmpLess()) - vector.data();
    VmaVectorInsert(vector, indexToInsert, value);
    return indexToInsert;
}
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
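// Illustrative usage of the sorted-vector helpers above (reusing the
// hypothetical U32Less comparator from the VmaBinaryFindFirstNotLess example):
//
//   VmaVectorInsertSorted<U32Less>(v, 5u); // binary-search, insert, stay sorted
//   VmaVectorRemoveSorted<U32Less>(v, 5u); // binary-search, erase if found
//
// A sorted vector used this way acts as a flat set: O(log n) lookup with
// contiguous, cache-friendly storage.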
/*
Allocator for objects of type T using a list of arrays (pools) to speed up
allocation. The number of elements that can be allocated is not bounded because
the allocator can create multiple blocks.
*/
template<typename T>
class VmaPoolAllocator
{
    VMA_CLASS_NO_COPY(VmaPoolAllocator)
public:
    VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
    ~VmaPoolAllocator();
    template<typename... Types> T* Alloc(Types... args);
    void Free(T* ptr);

private:
    union Item
    {
        uint32_t NextFreeIndex;
        alignas(T) char Value[sizeof(T)];
    };

    struct ItemBlock
    {
        Item* pItems;
        uint32_t Capacity;
        uint32_t FirstFreeIndex;
    };

    const VkAllocationCallbacks* m_pAllocationCallbacks;
    const uint32_t m_FirstBlockCapacity;
    VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;

    ItemBlock& CreateNewBlock();
};
template<typename T>
VmaPoolAllocator<T>::VmaPoolAllocator(const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_FirstBlockCapacity(firstBlockCapacity),
    m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
{
    VMA_ASSERT(m_FirstBlockCapacity > 1);
}
template<typename T>
VmaPoolAllocator<T>::~VmaPoolAllocator()
{
    for(size_t i = m_ItemBlocks.size(); i--; )
        vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
    m_ItemBlocks.clear();
}
template<typename T>
template<typename... Types> T* VmaPoolAllocator<T>::Alloc(Types... args)
{
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];
        // This block has some free items: Use first one.
        if(block.FirstFreeIndex != UINT32_MAX)
        {
            Item* const pItem = &block.pItems[block.FirstFreeIndex];
            block.FirstFreeIndex = pItem->NextFreeIndex;
            T* result = (T*)&pItem->Value;
            new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
            return result;
        }
    }

    // No block has a free item: Create a new one and use it.
    ItemBlock& newBlock = CreateNewBlock();
    Item* const pItem = &newBlock.pItems[0];
    newBlock.FirstFreeIndex = pItem->NextFreeIndex;
    T* result = (T*)&pItem->Value;
    new(result)T(std::forward<Types>(args)...); // Explicit constructor call.
    return result;
}
template<typename T>
void VmaPoolAllocator<T>::Free(T* ptr)
{
    // Search all memory blocks to find ptr.
    for(size_t i = m_ItemBlocks.size(); i--; )
    {
        ItemBlock& block = m_ItemBlocks[i];

        // Casting to union.
        Item* pItemPtr;
        memcpy(&pItemPtr, &ptr, sizeof(pItemPtr));

        // Check if pItemPtr is in the address range of this block.
        if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
        {
            ptr->~T(); // Explicit destructor call.
            const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
            pItemPtr->NextFreeIndex = block.FirstFreeIndex;
            block.FirstFreeIndex = index;
            return;
        }
    }
    VMA_ASSERT(0 && "Pointer doesn't belong to this memory pool.");
}
template<typename T>
typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
{
    const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
        m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;

    const ItemBlock newBlock = {
        vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
        newBlockCapacity,
        0 };

    m_ItemBlocks.push_back(newBlock);

    // Setup singly-linked list of all free items in this block.
    for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
        newBlock.pItems[i].NextFreeIndex = i + 1;
    newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
    return m_ItemBlocks.back();
}
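// Illustrative usage of VmaPoolAllocator (hypothetical type and callbacks):
//
//   VmaPoolAllocator<Foo> pool(callbacks, 32); // first block holds 32 items
//   Foo* a = pool.Alloc();                     // O(1) within a block
//   pool.Free(a);                              // returns slot to the free list
//
// Free slots form an intrusive singly-linked list through Item::NextFreeIndex,
// and each new block is 1.5x the capacity of the previous one, so the number
// of blocks grows only logarithmically with the number of live items.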
#if VMA_USE_STL_LIST

#define VmaList std::list

#else // #if VMA_USE_STL_LIST
template<typename T>
struct VmaListItem
{
    VmaListItem* pPrev;
    VmaListItem* pNext;
    T Value;
};

// Doubly linked list.
template<typename T>
class VmaRawList
{
    VMA_CLASS_NO_COPY(VmaRawList)
public:
    typedef VmaListItem<T> ItemType;
    VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks);
    ~VmaRawList();
    void Clear();

    size_t GetCount() const { return m_Count; }
    bool IsEmpty() const { return m_Count == 0; }

    ItemType* Front() { return m_pFront; }
    const ItemType* Front() const { return m_pFront; }
    ItemType* Back() { return m_pBack; }
    const ItemType* Back() const { return m_pBack; }

    ItemType* PushBack();
    ItemType* PushFront();
    ItemType* PushBack(const T& value);
    ItemType* PushFront(const T& value);
    void PopBack();
    void PopFront();

    // Item can be null - it means PushBack.
    ItemType* InsertBefore(ItemType* pItem);
    // Item can be null - it means PushFront.
    ItemType* InsertAfter(ItemType* pItem);

    ItemType* InsertBefore(ItemType* pItem, const T& value);
    ItemType* InsertAfter(ItemType* pItem, const T& value);

    void Remove(ItemType* pItem);

private:
    const VkAllocationCallbacks* const m_pAllocationCallbacks;
    VmaPoolAllocator<ItemType> m_ItemAllocator;
    ItemType* m_pFront;
    ItemType* m_pBack;
    size_t m_Count;
};
template<typename T>
VmaRawList<T>::VmaRawList(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_pAllocationCallbacks(pAllocationCallbacks),
    m_ItemAllocator(pAllocationCallbacks, 128),
    m_pFront(VMA_NULL),
    m_pBack(VMA_NULL),
    m_Count(0)
{
}
template<typename T>
VmaRawList<T>::~VmaRawList()
{
    // Intentionally not calling Clear, because that would be unnecessary
    // computation to return all items to m_ItemAllocator as free.
}
template<typename T>
void VmaRawList<T>::Clear()
{
    if(IsEmpty() == false)
    {
        ItemType* pItem = m_pBack;
        while(pItem != VMA_NULL)
        {
            ItemType* const pPrevItem = pItem->pPrev;
            m_ItemAllocator.Free(pItem);
            pItem = pPrevItem;
        }
        m_pFront = VMA_NULL;
        m_pBack = VMA_NULL;
        m_Count = 0;
    }
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pNext = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pPrev = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pPrev = m_pBack;
        m_pBack->pNext = pNewItem;
        m_pBack = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront()
{
    ItemType* const pNewItem = m_ItemAllocator.Alloc();
    pNewItem->pPrev = VMA_NULL;
    if(IsEmpty())
    {
        pNewItem->pNext = VMA_NULL;
        m_pFront = pNewItem;
        m_pBack = pNewItem;
        m_Count = 1;
    }
    else
    {
        pNewItem->pNext = m_pFront;
        m_pFront->pPrev = pNewItem;
        m_pFront = pNewItem;
        ++m_Count;
    }
    return pNewItem;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::PushBack(const T& value)
{
    ItemType* const pNewItem = PushBack();
    pNewItem->Value = value;
    return pNewItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::PushFront(const T& value)
{
    ItemType* const pNewItem = PushFront();
    pNewItem->Value = value;
    return pNewItem;
}
template<typename T>
void VmaRawList<T>::PopBack()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pBackItem = m_pBack;
    ItemType* const pPrevItem = pBackItem->pPrev;
    if(pPrevItem != VMA_NULL)
    {
        pPrevItem->pNext = VMA_NULL;
    }
    m_pBack = pPrevItem;
    m_ItemAllocator.Free(pBackItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::PopFront()
{
    VMA_HEAVY_ASSERT(m_Count > 0);
    ItemType* const pFrontItem = m_pFront;
    ItemType* const pNextItem = pFrontItem->pNext;
    if(pNextItem != VMA_NULL)
    {
        pNextItem->pPrev = VMA_NULL;
    }
    m_pFront = pNextItem;
    m_ItemAllocator.Free(pFrontItem);
    --m_Count;
}
template<typename T>
void VmaRawList<T>::Remove(ItemType* pItem)
{
    VMA_HEAVY_ASSERT(pItem != VMA_NULL);
    VMA_HEAVY_ASSERT(m_Count > 0);

    if(pItem->pPrev != VMA_NULL)
    {
        pItem->pPrev->pNext = pItem->pNext;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pFront == pItem);
        m_pFront = pItem->pNext;
    }

    if(pItem->pNext != VMA_NULL)
    {
        pItem->pNext->pPrev = pItem->pPrev;
    }
    else
    {
        VMA_HEAVY_ASSERT(m_pBack == pItem);
        m_pBack = pItem->pPrev;
    }

    m_ItemAllocator.Free(pItem);
    --m_Count;
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const prevItem = pItem->pPrev;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pPrev = prevItem;
        newItem->pNext = pItem;
        pItem->pPrev = newItem;
        if(prevItem != VMA_NULL)
        {
            prevItem->pNext = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pFront == pItem);
            m_pFront = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushBack();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
{
    if(pItem != VMA_NULL)
    {
        ItemType* const nextItem = pItem->pNext;
        ItemType* const newItem = m_ItemAllocator.Alloc();
        newItem->pNext = nextItem;
        newItem->pPrev = pItem;
        pItem->pNext = newItem;
        if(nextItem != VMA_NULL)
        {
            nextItem->pPrev = newItem;
        }
        else
        {
            VMA_HEAVY_ASSERT(m_pBack == pItem);
            m_pBack = newItem;
        }
        ++m_Count;
        return newItem;
    }
    else
        return PushFront();
}
template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertBefore(pItem);
    newItem->Value = value;
    return newItem;
}

template<typename T>
VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem, const T& value)
{
    ItemType* const newItem = InsertAfter(pItem);
    newItem->Value = value;
    return newItem;
}
template<typename T, typename AllocatorT>
class VmaList
{
    VMA_CLASS_NO_COPY(VmaList)
public:
    class iterator
    {
    public:
        iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        iterator operator++(int)
        {
            iterator result = *this;
            ++*this;
            return result;
        }
        iterator operator--(int)
        {
            iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        VmaRawList<T>* m_pList;
        VmaListItem<T>* m_pItem;

        iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        friend class VmaList<T, AllocatorT>;
    };
    class const_iterator
    {
    public:
        const_iterator() :
            m_pList(VMA_NULL),
            m_pItem(VMA_NULL)
        {
        }

        const_iterator(const iterator& src) :
            m_pList(src.m_pList),
            m_pItem(src.m_pItem)
        {
        }

        const T& operator*() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return m_pItem->Value;
        }
        const T* operator->() const
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            return &m_pItem->Value;
        }

        const_iterator& operator++()
        {
            VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
            m_pItem = m_pItem->pNext;
            return *this;
        }
        const_iterator& operator--()
        {
            if(m_pItem != VMA_NULL)
            {
                m_pItem = m_pItem->pPrev;
            }
            else
            {
                VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
                m_pItem = m_pList->Back();
            }
            return *this;
        }

        const_iterator operator++(int)
        {
            const_iterator result = *this;
            ++*this;
            return result;
        }
        const_iterator operator--(int)
        {
            const_iterator result = *this;
            --*this;
            return result;
        }

        bool operator==(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem == rhs.m_pItem;
        }
        bool operator!=(const const_iterator& rhs) const
        {
            VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
            return m_pItem != rhs.m_pItem;
        }

    private:
        const_iterator(const VmaRawList<T>* pList, const VmaListItem<T>* pItem) :
            m_pList(pList),
            m_pItem(pItem)
        {
        }

        const VmaRawList<T>* m_pList;
        const VmaListItem<T>* m_pItem;

        friend class VmaList<T, AllocatorT>;
    };
    VmaList(const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }

    bool empty() const { return m_RawList.IsEmpty(); }
    size_t size() const { return m_RawList.GetCount(); }

    iterator begin() { return iterator(&m_RawList, m_RawList.Front()); }
    iterator end() { return iterator(&m_RawList, VMA_NULL); }

    const_iterator cbegin() const { return const_iterator(&m_RawList, m_RawList.Front()); }
    const_iterator cend() const { return const_iterator(&m_RawList, VMA_NULL); }

    void clear() { m_RawList.Clear(); }
    void push_back(const T& value) { m_RawList.PushBack(value); }
    void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
    iterator insert(iterator it, const T& value) { return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }

private:
    VmaRawList<T> m_RawList;
};
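// Illustrative usage of the list wrapper above (mirrors a subset of std::list):
//
//   typedef VmaList< int, VmaStlAllocator<int> > IntList;
//   IntList list(VmaStlAllocator<int>(callbacks)); // hypothetical 'callbacks'
//   list.push_back(1);
//   for(IntList::iterator it = list.begin(); it != list.end(); ++it) { /* ... */ }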
#endif // #if VMA_USE_STL_LIST
#if VMA_USE_STL_UNORDERED_MAP

#define VmaPair std::pair

#define VMA_MAP_TYPE(KeyT, ValueT) \
    std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > >

#else // #if VMA_USE_STL_UNORDERED_MAP
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;
    T2 second;

    VmaPair() : first(), second() { }
    VmaPair(const T1& firstSrc, const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
};
/* Class compatible with a subset of the interface of std::unordered_map.
KeyT, ValueT must be POD because they will be stored in VmaVector.
*/
template<typename KeyT, typename ValueT>
class VmaMap
{
public:
    typedef VmaPair<KeyT, ValueT> PairType;
    typedef PairType* iterator;

    VmaMap(const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }

    iterator begin() { return m_Vector.begin(); }
    iterator end() { return m_Vector.end(); }

    void insert(const PairType& pair);
    iterator find(const KeyT& key);
    void erase(iterator it);

private:
    VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
};

#define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT>
template<typename FirstT, typename SecondT>
struct VmaPairFirstLess
{
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const VmaPair<FirstT, SecondT>& rhs) const
    {
        return lhs.first < rhs.first;
    }
    bool operator()(const VmaPair<FirstT, SecondT>& lhs, const FirstT& rhsFirst) const
    {
        return lhs.first < rhsFirst;
    }
};
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::insert(const PairType& pair)
{
    const size_t indexToInsert = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        pair,
        VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
    VmaVectorInsert(m_Vector, indexToInsert, pair);
}
template<typename KeyT, typename ValueT>
VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(const KeyT& key)
{
    PairType* it = VmaBinaryFindFirstNotLess(
        m_Vector.data(),
        m_Vector.data() + m_Vector.size(),
        key,
        VmaPairFirstLess<KeyT, ValueT>());
    if((it != m_Vector.end()) && (it->first == key))
    {
        return it;
    }
    else
    {
        return m_Vector.end();
    }
}
template<typename KeyT, typename ValueT>
void VmaMap<KeyT, ValueT>::erase(iterator it)
{
    VmaVectorRemove(m_Vector, it - m_Vector.begin());
}

#endif // #if VMA_USE_STL_UNORDERED_MAP
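// Illustrative usage of the vector-backed map above (active only when
// VMA_USE_STL_UNORDERED_MAP == 0). Keys are kept sorted, so find() is a binary
// search over contiguous storage rather than a hash lookup:
//
//   VMA_MAP_TYPE(uint32_t, float) map(
//       VmaStlAllocator< VmaPair<uint32_t, float> >(callbacks)); // hypothetical 'callbacks'
//   map.insert(VmaPair<uint32_t, float>(1, 2.0f));
//   if(map.find(1) != map.end()) { /* found */ }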
class VmaDeviceMemoryBlock;

enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
struct VmaAllocation_T
{
private:
    static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;

    enum FLAGS
    {
        FLAG_USER_DATA_STRING = 0x01,
    };

public:
    enum ALLOCATION_TYPE
    {
        ALLOCATION_TYPE_NONE,
        ALLOCATION_TYPE_BLOCK,
        ALLOCATION_TYPE_DEDICATED,
    };

    // This struct is allocated using VmaPoolAllocator.

    VmaAllocation_T(uint32_t currentFrameIndex, bool userDataString) :
        m_Alignment{1},
        m_Size{0},
        m_pUserData{VMA_NULL},
        m_LastUseFrameIndex{currentFrameIndex},
        m_MemoryTypeIndex{0},
        m_Type{(uint8_t)ALLOCATION_TYPE_NONE},
        m_SuballocationType{(uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN},
        m_MapCount{0},
        m_Flags{userDataString ? (uint8_t)FLAG_USER_DATA_STRING : (uint8_t)0}
    {
#if VMA_STATS_STRING_ENABLED
        m_CreationFrameIndex = currentFrameIndex;
        m_BufferImageUsage = 0;
#endif
    }

    ~VmaAllocation_T()
    {
        VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 && "Allocation was not unmapped before destruction.");

        // Check if owned string was freed.
        VMA_ASSERT(m_pUserData == VMA_NULL);
    }
    void InitBlockAllocation(
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset,
        VkDeviceSize alignment,
        VkDeviceSize size,
        uint32_t memoryTypeIndex,
        VmaSuballocationType suballocationType,
        bool mapped,
        bool canBecomeLost)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(block != VMA_NULL);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_Alignment = alignment;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_SuballocationType = (uint8_t)suballocationType;
        m_BlockAllocation.m_Block = block;
        m_BlockAllocation.m_Offset = offset;
        m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
    }

    void InitLost()
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
        m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
        m_MemoryTypeIndex = 0;
        m_BlockAllocation.m_Block = VMA_NULL;
        m_BlockAllocation.m_Offset = 0;
        m_BlockAllocation.m_CanBecomeLost = true;
    }
    void ChangeBlockAllocation(
        VmaAllocator hAllocator,
        VmaDeviceMemoryBlock* block,
        VkDeviceSize offset);

    void ChangeOffset(VkDeviceSize newOffset);
    // pMappedData not null means allocation is created with MAPPED flag.
    void InitDedicatedAllocation(
        uint32_t memoryTypeIndex,
        VkDeviceMemory hMemory,
        VmaSuballocationType suballocationType,
        void* pMappedData,
        VkDeviceSize size)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
        VMA_ASSERT(hMemory != VK_NULL_HANDLE);
        m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
        m_Alignment = 0;
        m_Size = size;
        m_MemoryTypeIndex = memoryTypeIndex;
        m_SuballocationType = (uint8_t)suballocationType;
        m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
        m_DedicatedAllocation.m_hMemory = hMemory;
        m_DedicatedAllocation.m_pMappedData = pMappedData;
    }
    ALLOCATION_TYPE GetType() const { return (ALLOCATION_TYPE)m_Type; }
    VkDeviceSize GetAlignment() const { return m_Alignment; }
    VkDeviceSize GetSize() const { return m_Size; }
    bool IsUserDataString() const { return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
    void* GetUserData() const { return m_pUserData; }
    void SetUserData(VmaAllocator hAllocator, void* pUserData);
    VmaSuballocationType GetSuballocationType() const { return (VmaSuballocationType)m_SuballocationType; }
    VmaDeviceMemoryBlock* GetBlock() const
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
        return m_BlockAllocation.m_Block;
    }
    VkDeviceSize GetOffset() const;
    VkDeviceMemory GetMemory() const;
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    bool IsPersistentMap() const { return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
    void* GetMappedData() const;
    bool CanBecomeLost() const;

    uint32_t GetLastUseFrameIndex() const
    {
        return m_LastUseFrameIndex.load();
    }
    bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
    {
        return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
    }
    /*
    - If m_LastUseFrameIndex + frameInUseCount < currentFrameIndex,
      makes the allocation lost and returns true.
    - Else, returns false.
    */
    bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    void DedicatedAllocCalcStatsInfo(VmaStatInfo& outInfo)
    {
        VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
        outInfo.blockCount = 1;
        outInfo.allocationCount = 1;
        outInfo.unusedRangeCount = 0;
        outInfo.usedBytes = m_Size;
        outInfo.unusedBytes = 0;
        outInfo.allocationSizeMin = outInfo.allocationSizeMax = m_Size;
        outInfo.unusedRangeSizeMin = UINT64_MAX;
        outInfo.unusedRangeSizeMax = 0;
    }

    void BlockAllocMap();
    void BlockAllocUnmap();
    VkResult DedicatedAllocMap(VmaAllocator hAllocator, void** ppData);
    void DedicatedAllocUnmap(VmaAllocator hAllocator);
#if VMA_STATS_STRING_ENABLED
    uint32_t GetCreationFrameIndex() const { return m_CreationFrameIndex; }
    uint32_t GetBufferImageUsage() const { return m_BufferImageUsage; }

    void InitBufferImageUsage(uint32_t bufferImageUsage)
    {
        VMA_ASSERT(m_BufferImageUsage == 0);
        m_BufferImageUsage = bufferImageUsage;
    }

    void PrintParameters(class VmaJsonWriter& json) const;
#endif
private:
    VkDeviceSize m_Alignment;
    VkDeviceSize m_Size;
    void* m_pUserData;
    VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
    uint32_t m_MemoryTypeIndex;
    uint8_t m_Type; // ALLOCATION_TYPE
    uint8_t m_SuballocationType; // VmaSuballocationType
    // Bit 0x80 is set when allocation was created with VMA_ALLOCATION_CREATE_MAPPED_BIT.
    // Bits with mask 0x7F are reference counter for vmaMapMemory()/vmaUnmapMemory().
    uint8_t m_MapCount;
    uint8_t m_Flags; // enum FLAGS

    // Allocation out of VmaDeviceMemoryBlock.
    struct BlockAllocation
    {
        VmaDeviceMemoryBlock* m_Block;
        VkDeviceSize m_Offset;
        bool m_CanBecomeLost;
    };

    // Allocation for an object that has its own private VkDeviceMemory.
    struct DedicatedAllocation
    {
        VkDeviceMemory m_hMemory;
        void* m_pMappedData;
    };

    union
    {
        BlockAllocation m_BlockAllocation;
        DedicatedAllocation m_DedicatedAllocation;
    };

#if VMA_STATS_STRING_ENABLED
    uint32_t m_CreationFrameIndex;
    uint32_t m_BufferImageUsage; // 0 if unknown.
#endif

    void FreeUserDataString(VmaAllocator hAllocator);
};
/*
Represents a region of a VmaDeviceMemoryBlock that is either assigned and
returned as allocated memory block, or free.
*/
struct VmaSuballocation
{
    VkDeviceSize offset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaSuballocationType type;
};

// Comparators for offsets.
struct VmaSuballocationOffsetLess
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset < rhs.offset;
    }
};
struct VmaSuballocationOffsetGreater
{
    bool operator()(const VmaSuballocation& lhs, const VmaSuballocation& rhs) const
    {
        return lhs.offset > rhs.offset;
    }
};

typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost of one additional allocation lost, as equivalent in bytes.
static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;

enum class VmaAllocationRequestType
{
    Normal,
    // Used by "Linear" algorithm.
    UpperAddress,
    EndOf1st,
    EndOf2nd,
};

/*
Parameters of a planned allocation inside a VmaDeviceMemoryBlock.
*/
struct VmaAllocationRequest
{
    VkDeviceSize offset;
    VkDeviceSize sumFreeSize; // Sum size of free items that overlap with proposed allocation.
    VkDeviceSize sumItemSize; // Sum size of items to make lost that overlap with proposed allocation.
    VmaSuballocationList::iterator item;
    size_t itemsToMakeLostCount;
    void* customData;
    VmaAllocationRequestType type;

    VkDeviceSize CalcCost() const
    {
        return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
    }
};
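// Worked example for CalcCost() (illustrative): a request that overlaps 2 MiB
// of existing suballocations (sumItemSize == 2 * 1048576) and would make 3
// allocations lost costs 2*1048576 + 3*VMA_LOST_ALLOCATION_COST == 5 MiB
// equivalent. Candidate requests with smaller cost are preferred when
// canMakeOtherLost is in effect.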
/*
Data structure used for bookkeeping of allocations and unused ranges of memory
in a single VkDeviceMemory block.
*/
class VmaBlockMetadata
{
public:
    VmaBlockMetadata(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata() { }
    virtual void Init(VkDeviceSize size) { m_Size = size; }
    // Validates all data structures inside this object. If not valid, returns false.
    virtual bool Validate() const = 0;
    VkDeviceSize GetSize() const { return m_Size; }
    virtual size_t GetAllocationCount() const = 0;
    virtual VkDeviceSize GetSumFreeSize() const = 0;
    virtual VkDeviceSize GetUnusedRangeSizeMax() const = 0;
    // Returns true if this block is empty - contains only a single free suballocation.
    virtual bool IsEmpty() const = 0;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const = 0;
    // Shouldn't modify blockCount.
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const = 0;
#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const = 0;
#endif
    // Tries to find a place for a suballocation with given parameters inside this block.
    // If succeeded, fills pAllocationRequest and returns true.
    // If failed, returns false.
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest) = 0;

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
    virtual VkResult CheckCorruption(const void* pBlockData) = 0;

    // Makes the actual allocation based on the request. The request has to be checked and valid.
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation) = 0;

    // Frees the suballocation assigned to the given memory region.
    virtual void Free(const VmaAllocation allocation) = 0;
    virtual void FreeAtOffset(VkDeviceSize offset) = 0;
protected:
    const VkAllocationCallbacks* GetAllocationCallbacks() const { return m_pAllocationCallbacks; }

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap_Begin(class VmaJsonWriter& json,
        VkDeviceSize unusedBytes,
        size_t allocationCount,
        size_t unusedRangeCount) const;
    void PrintDetailedMap_Allocation(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VmaAllocation hAllocation) const;
    void PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
        VkDeviceSize offset,
        VkDeviceSize size) const;
    void PrintDetailedMap_End(class VmaJsonWriter& json) const;
#endif

private:
    VkDeviceSize m_Size;
    const VkAllocationCallbacks* m_pAllocationCallbacks;
};
#define VMA_VALIDATE(cond) do { if(!(cond)) { \
        VMA_ASSERT(0 && "Validation failed: " #cond); \
        return false; \
    } } while(false)
class VmaBlockMetadata_Generic : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
public:
    VmaBlockMetadata_Generic(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Generic();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_Suballocations.size() - m_FreeCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const;

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);

    // For defragmentation.
    bool IsBufferImageGranularityConflictPossible(
        VkDeviceSize bufferImageGranularity,
        VmaSuballocationType& inOutPrevSuballocType) const;
private:
    friend class VmaDefragmentationAlgorithm_Generic;
    friend class VmaDefragmentationAlgorithm_Fast;

    uint32_t m_FreeCount;
    VkDeviceSize m_SumFreeSize;
    VmaSuballocationList m_Suballocations;
    // Suballocations that are free and have size greater than certain threshold.
    // Sorted by size, ascending.
    VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;

    bool ValidateFreeSuballocationList() const;

    // Checks if requested suballocation with given parameters can be placed in given suballocItem.
    // If yes, fills pOffset and returns true. If no, returns false.
    bool CheckAllocation(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        VmaSuballocationList::const_iterator suballocItem,
        bool canMakeOtherLost,
        VkDeviceSize* pOffset,
        size_t* itemsToMakeLostCount,
        VkDeviceSize* pSumFreeSize,
        VkDeviceSize* pSumItemSize) const;
    // Given a free suballocation, merges it with the following one, which must also be free.
    void MergeFreeWithNext(VmaSuballocationList::iterator item);
    // Releases the given suballocation, making it free, and merges it with
    // adjacent free suballocations if applicable.
    // Returns iterator to the new free suballocation at this place.
    VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
    // Given a free suballocation, inserts it into the sorted list
    // m_FreeSuballocationsBySize if it's suitable.
    void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
    // Given a free suballocation, removes it from the sorted list
    // m_FreeSuballocationsBySize if present.
    void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
};
class VmaBlockMetadata_Linear : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
public:
    VmaBlockMetadata_Linear(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Linear();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const;
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize; }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return GetAllocationCount() == 0; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData);
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation);
    virtual void FreeAtOffset(VkDeviceSize offset);
private:
    /*
    There are two suballocation vectors, used in ping-pong way.
    The one with index m_1stVectorIndex is called 1st.
    The one with index (m_1stVectorIndex ^ 1) is called 2nd.
    2nd can be non-empty only when 1st is not empty.
    When 2nd is not empty, m_2ndVectorMode indicates its mode of operation.
    */
    typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;

    enum SECOND_VECTOR_MODE
    {
        SECOND_VECTOR_EMPTY,
        /*
        Suballocations in 2nd vector are created later than the ones in 1st,
        but they all have smaller offsets.
        */
        SECOND_VECTOR_RING_BUFFER,
        /*
        Suballocations in 2nd vector are the upper side of a double stack.
        They all have offsets higher than those in the 1st vector.
        Top of this stack means smaller offsets, but higher indices in this vector.
        */
        SECOND_VECTOR_DOUBLE_STACK,
    };

    VkDeviceSize m_SumFreeSize;
    SuballocationVectorType m_Suballocations0, m_Suballocations1;
    uint32_t m_1stVectorIndex;
    SECOND_VECTOR_MODE m_2ndVectorMode;

    SuballocationVectorType& AccessSuballocations1st() { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    SuballocationVectorType& AccessSuballocations2nd() { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
    const SuballocationVectorType& AccessSuballocations1st() const { return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
    const SuballocationVectorType& AccessSuballocations2nd() const { return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }

    // Number of items in 1st vector with hAllocation = null at the beginning.
    size_t m_1stNullItemsBeginCount;
    // Number of other items in 1st vector with hAllocation = null somewhere in the middle.
    size_t m_1stNullItemsMiddleCount;
    // Number of items in 2nd vector with hAllocation = null.
    size_t m_2ndNullItemsCount;

    bool ShouldCompact1st() const;
    void CleanupAfterFree();
    bool CreateAllocationRequest_LowerAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
    bool CreateAllocationRequest_UpperAddress(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);
};
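// Design note on VmaBlockMetadata_Linear (summarizing the modes above): with
// only the 1st vector in use, the block behaves like a stack growing toward
// higher offsets. An upper-address allocation switches the 2nd vector to
// SECOND_VECTOR_DOUBLE_STACK, growing down from the end of the block. When
// allocations are freed from the bottom while new ones keep arriving, the 2nd
// vector operates as SECOND_VECTOR_RING_BUFFER and the block behaves as a
// circular queue.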
/*
- GetSize() is the original size of the allocated memory block.
- m_UsableSize is this size aligned down to a power of two.
  All allocations and calculations happen relative to m_UsableSize.
- GetUnusableSize() is the difference between them.
  It is reported as a separate, unused range, not available for allocations.
*/
class VmaBlockMetadata_Buddy : public VmaBlockMetadata
{
    VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
public:
    VmaBlockMetadata_Buddy(VmaAllocator hAllocator);
    virtual ~VmaBlockMetadata_Buddy();
    virtual void Init(VkDeviceSize size);

    virtual bool Validate() const;
    virtual size_t GetAllocationCount() const { return m_AllocationCount; }
    virtual VkDeviceSize GetSumFreeSize() const { return m_SumFreeSize + GetUnusableSize(); }
    virtual VkDeviceSize GetUnusedRangeSizeMax() const;
    virtual bool IsEmpty() const { return m_Root->type == Node::TYPE_FREE; }

    virtual void CalcAllocationStatInfo(VmaStatInfo& outInfo) const;
    virtual void AddPoolStats(VmaPoolStats& inoutStats) const;

#if VMA_STATS_STRING_ENABLED
    virtual void PrintDetailedMap(class VmaJsonWriter& json) const;
#endif
    virtual bool CreateAllocationRequest(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VkDeviceSize bufferImageGranularity,
        VkDeviceSize allocSize,
        VkDeviceSize allocAlignment,
        bool upperAddress,
        VmaSuballocationType allocType,
        bool canMakeOtherLost,
        uint32_t strategy,
        VmaAllocationRequest* pAllocationRequest);

    virtual bool MakeRequestedAllocationsLost(
        uint32_t currentFrameIndex,
        uint32_t frameInUseCount,
        VmaAllocationRequest* pAllocationRequest);

    virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);

    virtual VkResult CheckCorruption(const void* pBlockData) { return VK_ERROR_FEATURE_NOT_PRESENT; }
    virtual void Alloc(
        const VmaAllocationRequest& request,
        VmaSuballocationType type,
        VkDeviceSize allocSize,
        VmaAllocation hAllocation);

    virtual void Free(const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
    virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
private:
    static const VkDeviceSize MIN_NODE_SIZE = 32;
    static const size_t MAX_LEVELS = 30;

    struct ValidationContext
    {
        size_t calculatedAllocationCount;
        size_t calculatedFreeCount;
        VkDeviceSize calculatedSumFreeSize;

        ValidationContext() :
            calculatedAllocationCount(0),
            calculatedFreeCount(0),
            calculatedSumFreeSize(0) { }
    };

    struct Node
    {
        VkDeviceSize offset;
        enum TYPE
        {
            TYPE_FREE,
            TYPE_ALLOCATION,
            TYPE_SPLIT,
            TYPE_COUNT
        } type;
        Node* parent;
        Node* buddy;

        union
        {
            struct
            {
                Node* prev;
                Node* next;
            } free;
            struct
            {
                VmaAllocation alloc;
            } allocation;
            struct
            {
                Node* leftChild;
            } split;
        };
    };

    // Size of the memory block aligned down to a power of two.
    VkDeviceSize m_UsableSize;
    uint32_t m_LevelCount;

    Node* m_Root;
    struct
    {
        Node* front;
        Node* back;
    } m_FreeList[MAX_LEVELS];
    // Number of nodes in the tree with type == TYPE_ALLOCATION.
    size_t m_AllocationCount;
    // Number of nodes in the tree with type == TYPE_FREE.
    size_t m_FreeCount;
    // This includes space wasted due to internal fragmentation. Doesn't include unusable size.
    VkDeviceSize m_SumFreeSize;

    VkDeviceSize GetUnusableSize() const { return GetSize() - m_UsableSize; }
    void DeleteNode(Node* node);
    bool ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const;
    uint32_t AllocSizeToLevel(VkDeviceSize allocSize) const;
    inline VkDeviceSize LevelToNodeSize(uint32_t level) const { return m_UsableSize >> level; }
    // Alloc passed just for validation. Can be null.
    void FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset);
    void CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const;
    // Adds node to the front of FreeList at given level.
    // node->type must be FREE.
    void AddToFreeListFront(uint32_t level, Node* node);
    // Removes node from FreeList at given level.
    // node->type must be FREE.
    void RemoveFromFreeList(uint32_t level, Node* node);
#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const;
#endif
};
/*
Represents a single block of device memory (VkDeviceMemory) with all the
data about its regions (aka suballocations, VmaAllocation), assigned and free.

Thread-safety: This class must be externally synchronized.
*/
class VmaDeviceMemoryBlock
{
    VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
public:
    VmaBlockMetadata* m_pMetadata;

    VmaDeviceMemoryBlock(VmaAllocator hAllocator);

    ~VmaDeviceMemoryBlock()
    {
        VMA_ASSERT(m_MapCount == 0 && "VkDeviceMemory block is being destroyed while it is still mapped.");
        VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
    }

    // Always call after construction.
    void Init(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t newMemoryTypeIndex,
        VkDeviceMemory newMemory,
        VkDeviceSize newSize,
        uint32_t id,
        uint32_t algorithm);
    // Always call before destruction.
    void Destroy(VmaAllocator allocator);

    VmaPool GetParentPool() const { return m_hParentPool; }
    VkDeviceMemory GetDeviceMemory() const { return m_hMemory; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    uint32_t GetId() const { return m_Id; }
    void* GetMappedData() const { return m_pMappedData; }

    // Validates all data structures inside this object. If not valid, returns false.
    bool Validate() const;

    VkResult CheckCorruption(VmaAllocator hAllocator);

    // ppData can be null.
    VkResult Map(VmaAllocator hAllocator, uint32_t count, void** ppData);
    void Unmap(VmaAllocator hAllocator, uint32_t count);

    VkResult WriteMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
    VkResult ValidateMagicValueAroundAllocation(VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);

    VkResult BindBufferMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocator hAllocator,
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

private:
    VmaPool m_hParentPool; // VK_NULL_HANDLE if it doesn't belong to a custom pool.
    uint32_t m_MemoryTypeIndex;
    uint32_t m_Id;
    VkDeviceMemory m_hMemory;

    /*
    Protects access to m_hMemory so it is not used by multiple threads simultaneously,
    e.g. vkMapMemory, vkBindBufferMemory. Also protects m_MapCount, m_pMappedData.
    Allocations, deallocations, and any change in m_pMetadata are protected by the
    parent's VmaBlockVector::m_Mutex.
    */
    VMA_MUTEX m_Mutex;
    uint32_t m_MapCount;
    void* m_pMappedData;
};
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

struct VmaDefragmentationMove
{
    size_t srcBlockIndex;
    size_t dstBlockIndex;
    VkDeviceSize srcOffset;
    VkDeviceSize dstOffset;
    VkDeviceSize size;
    VmaAllocation hAllocation;
    VmaDeviceMemoryBlock* pSrcBlock;
    VmaDeviceMemoryBlock* pDstBlock;
};

class VmaDefragmentationAlgorithm;
/*
Sequence of VmaDeviceMemoryBlock. Represents memory blocks allocated for a
specific Vulkan memory type. Synchronized internally with a mutex.
*/
struct VmaBlockVector
{
    VMA_CLASS_NO_COPY(VmaBlockVector)
public:
    VmaBlockVector(
        VmaAllocator hAllocator,
        VmaPool hParentPool,
        uint32_t memoryTypeIndex,
        VkDeviceSize preferredBlockSize,
        size_t minBlockCount,
        size_t maxBlockCount,
        VkDeviceSize bufferImageGranularity,
        uint32_t frameInUseCount,
        bool explicitBlockSize,
        uint32_t algorithm);
    ~VmaBlockVector();

    VkResult CreateMinBlocks();

    VmaAllocator GetAllocator() const { return m_hAllocator; }
    VmaPool GetParentPool() const { return m_hParentPool; }
    bool IsCustomPool() const { return m_hParentPool != VMA_NULL; }
    uint32_t GetMemoryTypeIndex() const { return m_MemoryTypeIndex; }
    VkDeviceSize GetPreferredBlockSize() const { return m_PreferredBlockSize; }
    VkDeviceSize GetBufferImageGranularity() const { return m_BufferImageGranularity; }
    uint32_t GetFrameInUseCount() const { return m_FrameInUseCount; }
    uint32_t GetAlgorithm() const { return m_Algorithm; }

    bool IsCorruptionDetectionEnabled() const;

    VkResult Allocate(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void Free(const VmaAllocation hAllocation);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    void MakePoolAllocationsLost(
        uint32_t currentFrameIndex,
        size_t* pLostAllocationCount);
    VkResult CheckCorruption();

    // Saves results in pCtx->res.
    void Defragment(
        class VmaBlockVectorDefragmentationContext* pCtx,
        VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
        VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
        VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer);
    void DefragmentationEnd(
        class VmaBlockVectorDefragmentationContext* pCtx,
        uint32_t flags,
        VmaDefragmentationStats* pStats);

    uint32_t ProcessDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves);

    void CommitDefragmentations(
        class VmaBlockVectorDefragmentationContext *pCtx,
        VmaDefragmentationStats* pStats);

    // To be used only while the m_Mutex is locked. Used during defragmentation.
    size_t GetBlockCount() const { return m_Blocks.size(); }
    VmaDeviceMemoryBlock* GetBlock(size_t index) const { return m_Blocks[index]; }
    size_t CalcAllocationCount() const;
    bool IsBufferImageGranularityConflictPossible() const;

private:
    friend class VmaDefragmentationAlgorithm_Generic;

    const VmaAllocator m_hAllocator;
    const VmaPool m_hParentPool;
    const uint32_t m_MemoryTypeIndex;
    const VkDeviceSize m_PreferredBlockSize;
    const size_t m_MinBlockCount;
    const size_t m_MaxBlockCount;
    const VkDeviceSize m_BufferImageGranularity;
    const uint32_t m_FrameInUseCount;
    const bool m_ExplicitBlockSize;
    const uint32_t m_Algorithm;
    VMA_RW_MUTEX m_Mutex;

    // There can be at most one completely empty block kept alive - a hysteresis
    // that avoids alternating creation and destruction of a VkDeviceMemory.
    bool m_HasEmptyBlock;
    // Incrementally sorted by sumFreeSize, ascending.
    VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
    uint32_t m_NextBlockId;

    VkDeviceSize CalcMaxBlockSize() const;

    // Finds and removes given block from vector.
    void Remove(VmaDeviceMemoryBlock* pBlock);

    // Performs single step in sorting m_Blocks. They may not be fully sorted
    // after this call.
    void IncrementallySortBlocks();

    VkResult AllocatePage(
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        VmaAllocation* pAllocation);

    VkResult AllocateFromBlock(
        VmaDeviceMemoryBlock* pBlock,
        uint32_t currentFrameIndex,
        VkDeviceSize size,
        VkDeviceSize alignment,
        VmaAllocationCreateFlags allocFlags,
        void* pUserData,
        VmaSuballocationType suballocType,
        uint32_t strategy,
        VmaAllocation* pAllocation);

    VkResult CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex);

    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesCpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
    // Saves result to pCtx->res.
    void ApplyDefragmentationMovesGpu(
        class VmaBlockVectorDefragmentationContext* pDefragCtx,
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkCommandBuffer commandBuffer);

    void FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats);

    void UpdateHasEmptyBlock();
};
struct VmaPool_T
{
    VMA_CLASS_NO_COPY(VmaPool_T)
public:
    VmaBlockVector m_BlockVector;

    VmaPool_T(
        VmaAllocator hAllocator,
        const VmaPoolCreateInfo& createInfo,
        VkDeviceSize preferredBlockSize);
    ~VmaPool_T();

    uint32_t GetId() const { return m_Id; }
    void SetId(uint32_t id) { VMA_ASSERT(m_Id == 0); m_Id = id; }

    const char* GetName() const { return m_Name; }
    void SetName(const char* pName);

#if VMA_STATS_STRING_ENABLED
    //void PrintDetailedMap(class VmaStringBuilder& sb);
#endif

private:
    uint32_t m_Id;
    char* m_Name;
};
/*
Performs defragmentation:

- Updates `pBlockVector->m_pMetadata`.
- Updates allocations by calling ChangeBlockAllocation() or ChangeOffset().
- Does not move actual data, only assigns new offsets.
- Does not free any VkDeviceMemory blocks.
*/
class VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
public:
    VmaDefragmentationAlgorithm(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex) :
        m_hAllocator(hAllocator),
        m_pBlockVector(pBlockVector),
        m_CurrentFrameIndex(currentFrameIndex)
    {
    }
    virtual ~VmaDefragmentationAlgorithm()
    {
    }

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) = 0;
    virtual void AddAll() = 0;

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags) = 0;

    virtual VkDeviceSize GetBytesMoved() const = 0;
    virtual uint32_t GetAllocationsMoved() const = 0;

protected:
    VmaAllocator const m_hAllocator;
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrentFrameIndex;

    struct AllocationInfo
    {
        VmaAllocation m_hAllocation;
        VkBool32* m_pChanged;

        AllocationInfo() :
            m_hAllocation(VK_NULL_HANDLE),
            m_pChanged(VMA_NULL)
        {
        }
        AllocationInfo(VmaAllocation hAlloc, VkBool32* pChanged) :
            m_hAllocation(hAlloc),
            m_pChanged(pChanged)
        {
        }
    };
};
class VmaDefragmentationAlgorithm_Generic : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
public:
    VmaDefragmentationAlgorithm_Generic(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Generic();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    struct AllocationInfoSizeGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
        }
    };

    struct AllocationInfoOffsetGreater
    {
        bool operator()(const AllocationInfo& lhs, const AllocationInfo& rhs) const
        {
            return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
        }
    };

    struct BlockInfo
    {
        size_t m_OriginalBlockIndex;
        VmaDeviceMemoryBlock* m_pBlock;
        bool m_HasNonMovableAllocations;
        VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;

        BlockInfo(const VkAllocationCallbacks* pAllocationCallbacks) :
            m_OriginalBlockIndex(SIZE_MAX),
            m_pBlock(VMA_NULL),
            m_HasNonMovableAllocations(true),
            m_Allocations(pAllocationCallbacks)
        {
        }

        void CalcHasNonMovableAllocations()
        {
            const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
            const size_t defragmentAllocCount = m_Allocations.size();
            m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
        }

        void SortAllocationsBySizeDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
        }

        void SortAllocationsByOffsetDescending()
        {
            VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
        }
    };

    struct BlockPointerLess
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const VmaDeviceMemoryBlock* pRhsBlock) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlock;
        }
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
        }
    };

    // 1. Blocks with some non-movable allocations go first.
    // 2. Blocks with smaller sumFreeSize go first.
    struct BlockInfoCompareMoveDestination
    {
        bool operator()(const BlockInfo* pLhsBlockInfo, const BlockInfo* pRhsBlockInfo) const
        {
            if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return true;
            }
            if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
            {
                return false;
            }
            if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
            {
                return true;
            }
            return false;
        }
    };

    typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
    BlockInfoVector m_Blocks;

    VkResult DefragmentRound(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        bool freeOldAllocations);

    size_t CalcBlocksWithNonMovableCount() const;

    static bool MoveMakesSense(
        size_t dstBlockIndex, VkDeviceSize dstOffset,
        size_t srcBlockIndex, VkDeviceSize srcOffset);
};
class VmaDefragmentationAlgorithm_Fast : public VmaDefragmentationAlgorithm
{
    VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
public:
    VmaDefragmentationAlgorithm_Fast(
        VmaAllocator hAllocator,
        VmaBlockVector* pBlockVector,
        uint32_t currentFrameIndex,
        bool overlappingMoveSupported);
    virtual ~VmaDefragmentationAlgorithm_Fast();

    virtual void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
    virtual void AddAll() { m_AllAllocations = true; }

    virtual VkResult Defragment(
        VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
        VkDeviceSize maxBytesToMove,
        uint32_t maxAllocationsToMove,
        VmaDefragmentationFlags flags);

    virtual VkDeviceSize GetBytesMoved() const { return m_BytesMoved; }
    virtual uint32_t GetAllocationsMoved() const { return m_AllocationsMoved; }

private:
    struct BlockInfo
    {
        size_t origBlockIndex;
    };

    class FreeSpaceDatabase
    {
    public:
        FreeSpaceDatabase()
        {
            FreeSpace s = {};
            s.blockInfoIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                m_FreeSpaces[i] = s;
            }
        }

        void Register(size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
        {
            if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                return;
            }

            // Find first invalid or the smallest structure.
            size_t bestIndex = SIZE_MAX;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Empty structure.
                if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
                {
                    bestIndex = i;
                    break;
                }
                if(m_FreeSpaces[i].size < size &&
                    (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
                {
                    bestIndex = i;
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
                m_FreeSpaces[bestIndex].offset = offset;
                m_FreeSpaces[bestIndex].size = size;
            }
        }

        bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
            size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
        {
            size_t bestIndex = SIZE_MAX;
            VkDeviceSize bestFreeSpaceAfter = 0;
            for(size_t i = 0; i < MAX_COUNT; ++i)
            {
                // Structure is valid.
                if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
                {
                    const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
                    // Allocation fits into this structure.
                    if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
                    {
                        const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
                            (dstOffset + size);
                        if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
                        {
                            bestIndex = i;
                            bestFreeSpaceAfter = freeSpaceAfter;
                        }
                    }
                }
            }

            if(bestIndex != SIZE_MAX)
            {
                outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
                outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);

                if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    // Leave this structure for remaining empty space.
                    const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
                    m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
                    m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
                }
                else
                {
                    // This structure becomes invalid.
                    m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
                }

                return true;
            }

            return false;
        }

    private:
        static const size_t MAX_COUNT = 4;

        struct FreeSpace
        {
            size_t blockInfoIndex; // SIZE_MAX means this structure is invalid.
            VkDeviceSize offset;
            VkDeviceSize size;
        } m_FreeSpaces[MAX_COUNT];
    };

    const bool m_OverlappingMoveSupported;

    uint32_t m_AllocationCount;
    bool m_AllAllocations;

    VkDeviceSize m_BytesMoved;
    uint32_t m_AllocationsMoved;

    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;

    void PreprocessMetadata();
    void PostprocessMetadata();
    void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc);
};
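// Worked example (illustrative, not part of the library): the arithmetic used
// by FreeSpaceDatabase::Fetch() above. With a registered gap at offset 13 of
// size 100 and a request of size 64 aligned to 16 (alignments are powers of
// two in Vulkan), the destination offset is aligned up and the leftover tail
// is what may stay registered.
static inline void VmaFreeSpaceFetchSketch()
{
    const VkDeviceSize gapOffset = 13, gapSize = 100;
    const VkDeviceSize alignment = 16, allocSize = 64;
    const VkDeviceSize dstOffset = (gapOffset + alignment - 1) & ~(alignment - 1); // 16
    const bool fits = dstOffset + allocSize <= gapOffset + gapSize;                // 80 <= 113
    const VkDeviceSize tail = (gapOffset + gapSize) - (dstOffset + allocSize);     // 33 bytes left over
    VMA_ASSERT(fits && tail == 33);
    (void)fits; (void)tail;
}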
struct VmaBlockDefragmentationContext
{
    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
    };
    uint32_t flags;
    VkBuffer hBuffer;
};

class VmaBlockVectorDefragmentationContext
{
    VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
public:
    VkResult res;
    bool mutexLocked;
    VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > defragmentationMoves;
    uint32_t defragmentationMovesProcessed;
    uint32_t defragmentationMovesCommitted;
    bool hasDefragmentationPlan;

    VmaBlockVectorDefragmentationContext(
        VmaAllocator hAllocator,
        VmaPool hCustomPool, // Optional.
        VmaBlockVector* pBlockVector,
        uint32_t currFrameIndex);
    ~VmaBlockVectorDefragmentationContext();

    VmaPool GetCustomPool() const { return m_hCustomPool; }
    VmaBlockVector* GetBlockVector() const { return m_pBlockVector; }
    VmaDefragmentationAlgorithm* GetAlgorithm() const { return m_pAlgorithm; }

    void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
    void AddAll() { m_AllAllocations = true; }

private:
    const VmaAllocator m_hAllocator;
    // Null if not from custom pool.
    const VmaPool m_hCustomPool;
    // Redundant, for convenience not to fetch from m_hCustomPool->m_BlockVector.
    VmaBlockVector* const m_pBlockVector;
    const uint32_t m_CurrFrameIndex;
    // Owner of this object.
    VmaDefragmentationAlgorithm* m_pAlgorithm;

    struct AllocInfo
    {
        VmaAllocation hAlloc;
        VkBool32* pChanged;
    };
    // Used between constructor and Begin.
    VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
    bool m_AllAllocations;
};
struct VmaDefragmentationContext_T
{
private:
    VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
public:
    VmaDefragmentationContext_T(
        VmaAllocator hAllocator,
        uint32_t currFrameIndex,
        uint32_t flags,
        VmaDefragmentationStats* pStats);
    ~VmaDefragmentationContext_T();

    void AddPools(uint32_t poolCount, VmaPool* pPools);
    void AddAllocations(
        uint32_t allocationCount,
        VmaAllocation* pAllocations,
        VkBool32* pAllocationsChanged);

    /*
    Returns:
    - `VK_SUCCESS` if succeeded and object can be destroyed immediately.
    - `VK_NOT_READY` if succeeded but the object must remain alive until vmaDefragmentationEnd().
    - Negative value if error occurred and object can be destroyed immediately.
    */
    VkResult Defragment(
        VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
        VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
        VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags);

    VkResult DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo);
    VkResult DefragmentPassEnd();

private:
    const VmaAllocator m_hAllocator;
    const uint32_t m_CurrFrameIndex;
    const uint32_t m_Flags;
    VmaDefragmentationStats* const m_pStats;

    VkDeviceSize m_MaxCpuBytesToMove;
    uint32_t m_MaxCpuAllocationsToMove;
    VkDeviceSize m_MaxGpuBytesToMove;
    uint32_t m_MaxGpuAllocationsToMove;

    // Owner of these objects.
    VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
    // Owner of these objects.
    VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
};
#if VMA_RECORDING_ENABLED

class VmaRecorder
{
public:
    VmaRecorder();
    VkResult Init(const VmaRecordSettings& settings, bool useMutex);
    void WriteConfiguration(
        const VkPhysicalDeviceProperties& devProps,
        const VkPhysicalDeviceMemoryProperties& memProps,
        uint32_t vulkanApiVersion,
        bool dedicatedAllocationExtensionEnabled,
        bool bindMemory2ExtensionEnabled,
        bool memoryBudgetExtensionEnabled,
        bool deviceCoherentMemoryExtensionEnabled);
    ~VmaRecorder();

    void RecordCreateAllocator(uint32_t frameIndex);
    void RecordDestroyAllocator(uint32_t frameIndex);
    void RecordCreatePool(uint32_t frameIndex,
        const VmaPoolCreateInfo& createInfo,
        VmaPool pool);
    void RecordDestroyPool(uint32_t frameIndex, VmaPool pool);
    void RecordAllocateMemory(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryPages(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        const VmaAllocationCreateInfo& createInfo,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordAllocateMemoryForImage(uint32_t frameIndex,
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        const VmaAllocationCreateInfo& createInfo,
        VmaAllocation allocation);
    void RecordFreeMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFreeMemoryPages(uint32_t frameIndex,
        uint64_t allocationCount,
        const VmaAllocation* pAllocations);
    void RecordSetAllocationUserData(uint32_t frameIndex,
        VmaAllocation allocation,
        const void* pUserData);
    void RecordCreateLostAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordUnmapMemory(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordFlushAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordInvalidateAllocation(uint32_t frameIndex,
        VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
    void RecordCreateBuffer(uint32_t frameIndex,
        const VkBufferCreateInfo& bufCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordCreateImage(uint32_t frameIndex,
        const VkImageCreateInfo& imageCreateInfo,
        const VmaAllocationCreateInfo& allocCreateInfo,
        VmaAllocation allocation);
    void RecordDestroyBuffer(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordDestroyImage(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordTouchAllocation(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordGetAllocationInfo(uint32_t frameIndex,
        VmaAllocation allocation);
    void RecordMakePoolAllocationsLost(uint32_t frameIndex,
        VmaPool pool);
    void RecordDefragmentationBegin(uint32_t frameIndex,
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationContext ctx);
    void RecordDefragmentationEnd(uint32_t frameIndex,
        VmaDefragmentationContext ctx);
    void RecordSetPoolName(uint32_t frameIndex,
        VmaPool pool,
        const char* name);

private:
    struct CallParams
    {
        uint32_t threadId;
        double time;
    };

    class UserDataString
    {
    public:
        UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData);
        const char* GetString() const { return m_Str; }

    private:
        char m_Buf[32];
        const char* m_Str;
    };

    bool m_UseMutex;
    VmaRecordFlags m_Flags;
    FILE* m_File;
    VMA_MUTEX m_FileMutex;
    int64_t m_Freq;
    int64_t m_StartCounter;

    void GetBasicParams(CallParams& outParams);

    // T must be a pointer type, e.g. VmaAllocation, VmaPool.
    template<typename T>
    void PrintPointerList(uint64_t count, const T* pItems)
    {
        if(count)
        {
            fprintf(m_File, "%p", pItems[0]);
            for(uint64_t i = 1; i < count; ++i)
            {
                fprintf(m_File, " %p", pItems[i]);
            }
        }
    }

    void PrintPointerList(uint64_t count, const VmaAllocation* pItems);
    void Flush();
};

#endif // #if VMA_RECORDING_ENABLED
// Thread-safe wrapper over VmaPoolAllocator free list, for allocation of VmaAllocation_T objects.
class VmaAllocationObjectAllocator
{
    VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
public:
    VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks);

    template<typename... Types> VmaAllocation Allocate(Types... args);
    void Free(VmaAllocation hAlloc);

private:
    VMA_MUTEX m_Mutex;
    VmaPoolAllocator<VmaAllocation_T> m_Allocator;
};
struct VmaCurrentBudgetData
{
    VMA_ATOMIC_UINT64 m_BlockBytes[VK_MAX_MEMORY_HEAPS];
    VMA_ATOMIC_UINT64 m_AllocationBytes[VK_MAX_MEMORY_HEAPS];

#if VMA_MEMORY_BUDGET
    VMA_ATOMIC_UINT32 m_OperationsSinceBudgetFetch;
    VMA_RW_MUTEX m_BudgetMutex;
    uint64_t m_VulkanUsage[VK_MAX_MEMORY_HEAPS];
    uint64_t m_VulkanBudget[VK_MAX_MEMORY_HEAPS];
    uint64_t m_BlockBytesAtBudgetFetch[VK_MAX_MEMORY_HEAPS];
#endif // #if VMA_MEMORY_BUDGET

    VmaCurrentBudgetData()
    {
        for(uint32_t heapIndex = 0; heapIndex < VK_MAX_MEMORY_HEAPS; ++heapIndex)
        {
            m_BlockBytes[heapIndex] = 0;
            m_AllocationBytes[heapIndex] = 0;
#if VMA_MEMORY_BUDGET
            m_VulkanUsage[heapIndex] = 0;
            m_VulkanBudget[heapIndex] = 0;
            m_BlockBytesAtBudgetFetch[heapIndex] = 0;
#endif
        }

#if VMA_MEMORY_BUDGET
        m_OperationsSinceBudgetFetch = 0;
#endif
    }

    void AddAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        m_AllocationBytes[heapIndex] += allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }

    void RemoveAllocation(uint32_t heapIndex, VkDeviceSize allocationSize)
    {
        VMA_ASSERT(m_AllocationBytes[heapIndex] >= allocationSize);
        m_AllocationBytes[heapIndex] -= allocationSize;
#if VMA_MEMORY_BUDGET
        ++m_OperationsSinceBudgetFetch;
#endif
    }
};
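// Illustrative sketch (not part of the library): the per-heap invariant this
// struct maintains. m_BlockBytes counts whole VkDeviceMemory blocks owned by
// the allocator; m_AllocationBytes counts only the bytes suballocated out of
// them, so allocation bytes can never exceed block bytes.
static inline void VmaBudgetInvariantSketch()
{
    VkDeviceSize blockBytes = 0, allocationBytes = 0;
    blockBytes += 256ull << 20;     // a 256 MiB VkDeviceMemory block is created
    allocationBytes += 64ull << 20; // a 64 MiB allocation is carved out of it
    VMA_ASSERT(allocationBytes <= blockBytes);
    (void)blockBytes; (void)allocationBytes;
}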
// Main allocator object.
struct VmaAllocator_T
{
    VMA_CLASS_NO_COPY(VmaAllocator_T)
public:
    bool m_UseMutex;
    uint32_t m_VulkanApiVersion;
    bool m_UseKhrDedicatedAllocation; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseKhrBindMemory2; // Can be set only if m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0).
    bool m_UseExtMemoryBudget;
    bool m_UseAmdDeviceCoherentMemory;
    bool m_UseKhrBufferDeviceAddress;
    VkDevice m_hDevice;
    VkInstance m_hInstance;
    bool m_AllocationCallbacksSpecified;
    VkAllocationCallbacks m_AllocationCallbacks;
    VmaDeviceMemoryCallbacks m_DeviceMemoryCallbacks;
    VmaAllocationObjectAllocator m_AllocationObjectAllocator;

    // Each bit (1 << i) is set if HeapSizeLimit is enabled for that heap, so cannot allocate more than the heap size.
    uint32_t m_HeapSizeLimitMask;

    VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
    VkPhysicalDeviceMemoryProperties m_MemProps;

    // Default pools.
    VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];

    // Each vector is sorted by memory (handle value).
    typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
    AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
    VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];

    VmaCurrentBudgetData m_Budget;

    VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo);
    VkResult Init(const VmaAllocatorCreateInfo* pCreateInfo);
    ~VmaAllocator_T();

    const VkAllocationCallbacks* GetAllocationCallbacks() const
    {
        return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
    }
    const VmaVulkanFunctions& GetVulkanFunctions() const
    {
        return m_VulkanFunctions;
    }

    VkPhysicalDevice GetPhysicalDevice() const { return m_PhysicalDevice; }

    VkDeviceSize GetBufferImageGranularity() const
    {
        return VMA_MAX(
            static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
            m_PhysicalDeviceProperties.limits.bufferImageGranularity);
    }

    uint32_t GetMemoryHeapCount() const { return m_MemProps.memoryHeapCount; }
    uint32_t GetMemoryTypeCount() const { return m_MemProps.memoryTypeCount; }

    uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex) const
    {
        VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
        return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
    }
    // True when specific memory type is HOST_VISIBLE but not HOST_COHERENT.
    bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex) const
    {
        return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
    }
    // Minimum alignment for all allocations in specific memory type.
    VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex) const
    {
        return IsMemoryTypeNonCoherent(memTypeIndex) ?
            VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
            (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
    }

    bool IsIntegratedGpu() const
    {
        return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
    }

    uint32_t GetGlobalMemoryTypeBits() const { return m_GlobalMemoryTypeBits; }

#if VMA_RECORDING_ENABLED
    VmaRecorder* GetRecorder() const { return m_pRecorder; }
#endif

    void GetBufferMemoryRequirements(
        VkBuffer hBuffer,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;
    void GetImageMemoryRequirements(
        VkImage hImage,
        VkMemoryRequirements& memReq,
        bool& requiresDedicatedAllocation,
        bool& prefersDedicatedAllocation) const;

    // Main allocation function.
    VkResult AllocateMemory(
        const VkMemoryRequirements& vkMemReq,
        bool requiresDedicatedAllocation,
        bool prefersDedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage, // UINT32_MAX when unknown.
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Main deallocation function.
    void FreeMemory(
        size_t allocationCount,
        const VmaAllocation* pAllocations);

    VkResult ResizeAllocation(
        const VmaAllocation alloc,
        VkDeviceSize newSize);

    void CalculateStats(VmaStats* pStats);

    void GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount);

#if VMA_STATS_STRING_ENABLED
    void PrintDetailedMap(class VmaJsonWriter& json);
#endif

    VkResult DefragmentationBegin(
        const VmaDefragmentationInfo2& info,
        VmaDefragmentationStats* pStats,
        VmaDefragmentationContext* pContext);
    VkResult DefragmentationEnd(
        VmaDefragmentationContext context);

    VkResult DefragmentationPassBegin(
        VmaDefragmentationPassInfo* pInfo,
        VmaDefragmentationContext context);
    VkResult DefragmentationPassEnd(
        VmaDefragmentationContext context);

    void GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo);
    bool TouchAllocation(VmaAllocation hAllocation);

    VkResult CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool);
    void DestroyPool(VmaPool pool);
    void GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats);

    void SetCurrentFrameIndex(uint32_t frameIndex);
    uint32_t GetCurrentFrameIndex() const { return m_CurrentFrameIndex.load(); }

    void MakePoolAllocationsLost(
        VmaPool hPool,
        size_t* pLostAllocationCount);
    VkResult CheckPoolCorruption(VmaPool hPool);
    VkResult CheckCorruption(uint32_t memoryTypeBits);

    void CreateLostAllocation(VmaAllocation* pAllocation);

    // Call to Vulkan function vkAllocateMemory with accompanying bookkeeping.
    VkResult AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
    // Call to Vulkan function vkFreeMemory with accompanying bookkeeping.
    void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
    // Call to Vulkan function vkBindBufferMemory or vkBindBufferMemory2KHR.
    VkResult BindVulkanBuffer(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkBuffer buffer,
        const void* pNext);
    // Call to Vulkan function vkBindImageMemory or vkBindImageMemory2KHR.
    VkResult BindVulkanImage(
        VkDeviceMemory memory,
        VkDeviceSize memoryOffset,
        VkImage image,
        const void* pNext);

    VkResult Map(VmaAllocation hAllocation, void** ppData);
    void Unmap(VmaAllocation hAllocation);

    VkResult BindBufferMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkBuffer hBuffer,
        const void* pNext);
    VkResult BindImageMemory(
        VmaAllocation hAllocation,
        VkDeviceSize allocationLocalOffset,
        VkImage hImage,
        const void* pNext);

    void FlushOrInvalidateAllocation(
        VmaAllocation hAllocation,
        VkDeviceSize offset, VkDeviceSize size,
        VMA_CACHE_OPERATION op);

    void FillAllocation(const VmaAllocation hAllocation, uint8_t pattern);

    /*
    Returns bit mask of memory types that can support defragmentation on GPU as
    they support creation of required buffer for copy operations.
    */
    uint32_t GetGpuDefragmentationMemoryTypeBits();

private:
    VkDeviceSize m_PreferredLargeHeapBlockSize;

    VkPhysicalDevice m_PhysicalDevice;
    VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
    VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits; // UINT32_MAX means uninitialized.

    VMA_RW_MUTEX m_PoolsMutex;
    // Protected by m_PoolsMutex. Sorted by pointer value.
    VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
    uint32_t m_NextPoolId;

    VmaVulkanFunctions m_VulkanFunctions;

    // Global bit mask AND-ed with any memoryTypeBits to disallow certain memory types.
    uint32_t m_GlobalMemoryTypeBits;

#if VMA_RECORDING_ENABLED
    VmaRecorder* m_pRecorder;
#endif

    void ImportVulkanFunctions_Static();
    void ImportVulkanFunctions_Dynamic();
    void ValidateVulkanFunctions();

    VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);

    VkResult AllocateMemoryOfType(
        VkDeviceSize size,
        VkDeviceSize alignment,
        bool dedicatedAllocation,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        const VmaAllocationCreateInfo& createInfo,
        uint32_t memTypeIndex,
        VmaSuballocationType suballocType,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    // Helper function only to be used inside AllocateDedicatedMemory.
    VkResult AllocateDedicatedMemoryPage(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        const VkMemoryAllocateInfo& allocInfo,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VmaAllocation* pAllocation);

    // Allocates and registers new VkDeviceMemory specifically for dedicated allocations.
    VkResult AllocateDedicatedMemory(
        VkDeviceSize size,
        VmaSuballocationType suballocType,
        uint32_t memTypeIndex,
        bool withinBudget,
        bool map,
        bool isUserDataString,
        void* pUserData,
        VkBuffer dedicatedBuffer,
        VkBufferUsageFlags dedicatedBufferUsage,
        VkImage dedicatedImage,
        size_t allocationCount,
        VmaAllocation* pAllocations);

    void FreeDedicatedMemory(const VmaAllocation allocation);

    /*
    Calculates and returns bit mask of memory types that can support
    defragmentation on GPU as they support creation of required buffer for
    copy operations.
    */
    uint32_t CalculateGpuDefragmentationMemoryTypeBits() const;

    uint32_t CalculateGlobalMemoryTypeBits() const;

#if VMA_MEMORY_BUDGET
    void UpdateVulkanBudget();
#endif // #if VMA_MEMORY_BUDGET
};
static void* VmaMalloc(VmaAllocator hAllocator, size_t size, size_t alignment)
{
    return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
}

static void VmaFree(VmaAllocator hAllocator, void* ptr)
{
    VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
}

template<typename T>
static T* VmaAllocate(VmaAllocator hAllocator)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T), VMA_ALIGN_OF(T));
}

template<typename T>
static T* VmaAllocateArray(VmaAllocator hAllocator, size_t count)
{
    return (T*)VmaMalloc(hAllocator, sizeof(T) * count, VMA_ALIGN_OF(T));
}

template<typename T>
static void vma_delete(VmaAllocator hAllocator, T* ptr)
{
    if(ptr != VMA_NULL)
    {
        ptr->~T();
        VmaFree(hAllocator, ptr);
    }
}

template<typename T>
static void vma_delete_array(VmaAllocator hAllocator, T* ptr, size_t count)
{
    if(ptr != VMA_NULL)
    {
        for(size_t i = count; i--; )
        {
            ptr[i].~T();
        }
        VmaFree(hAllocator, ptr);
    }
}
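// Usage sketch (illustrative; VmaExamplePayload is a hypothetical type, not
// part of the library): the helpers above split raw memory (VmaMalloc/VmaFree
// routed through the user's VkAllocationCallbacks) from object lifetime, which
// is handled with placement-new and an explicit destructor call in vma_delete().
#include <new>
struct VmaExamplePayload { int value; };
static inline void VmaDeleteUsageSketch(VmaAllocator hAllocator)
{
    VmaExamplePayload* p = new(VmaAllocate<VmaExamplePayload>(hAllocator)) VmaExamplePayload{ 42 };
    vma_delete(hAllocator, p); // runs ~VmaExamplePayload(), then VmaFree()
}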
#if VMA_STATS_STRING_ENABLED

class VmaStringBuilder
{
public:
    VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
    size_t GetLength() const { return m_Data.size(); }
    const char* GetData() const { return m_Data.data(); }

    void Add(char ch) { m_Data.push_back(ch); }
    void Add(const char* pStr);
    void AddNewLine() { Add('\n'); }
    void AddNumber(uint32_t num);
    void AddNumber(uint64_t num);
    void AddPointer(const void* ptr);

private:
    VmaVector< char, VmaStlAllocator<char> > m_Data;
};

void VmaStringBuilder::Add(const char* pStr)
{
    const size_t strLen = strlen(pStr);
    if(strLen > 0)
    {
        const size_t oldCount = m_Data.size();
        m_Data.resize(oldCount + strLen);
        memcpy(m_Data.data() + oldCount, pStr, strLen);
    }
}

void VmaStringBuilder::AddNumber(uint32_t num)
{
    char buf[11];
    buf[10] = '\0';
    char *p = &buf[10];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddNumber(uint64_t num)
{
    char buf[21];
    buf[20] = '\0';
    char *p = &buf[20];
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    }
    while(num);
    Add(p);
}

void VmaStringBuilder::AddPointer(const void* ptr)
{
    char buf[21];
    VmaPtrToStr(buf, sizeof(buf), ptr);
    Add(buf);
}

#endif // #if VMA_STATS_STRING_ENABLED
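// Worked example (illustrative): AddNumber() above formats digits in reverse
// into a small stack buffer, then appends from the first written digit - no
// heap allocation. For num = 407 the loop stores '7', '0', '4' and p ends up
// pointing at "407".
static inline void VmaAddNumberSketch()
{
    char buf[11];
    buf[10] = '\0';
    char* p = &buf[10];
    uint32_t num = 407;
    do
    {
        *--p = '0' + (num % 10);
        num /= 10;
    } while(num);
    VMA_ASSERT(strcmp(p, "407") == 0);
    (void)p;
}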
#if VMA_STATS_STRING_ENABLED

class VmaJsonWriter
{
    VMA_CLASS_NO_COPY(VmaJsonWriter)
public:
    VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
    ~VmaJsonWriter();

    void BeginObject(bool singleLine = false);
    void EndObject();

    void BeginArray(bool singleLine = false);
    void EndArray();

    void WriteString(const char* pStr);
    void BeginString(const char* pStr = VMA_NULL);
    void ContinueString(const char* pStr);
    void ContinueString(uint32_t n);
    void ContinueString(uint64_t n);
    void ContinueString_Pointer(const void* ptr);
    void EndString(const char* pStr = VMA_NULL);

    void WriteNumber(uint32_t n);
    void WriteNumber(uint64_t n);
    void WriteBool(bool b);
    void WriteNull();

private:
    static const char* const INDENT;

    enum COLLECTION_TYPE
    {
        COLLECTION_TYPE_OBJECT,
        COLLECTION_TYPE_ARRAY,
    };
    struct StackItem
    {
        COLLECTION_TYPE type;
        uint32_t valueCount;
        bool singleLineMode;
    };

    VmaStringBuilder& m_SB;
    VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
    bool m_InsideString;

    void BeginValue(bool isString);
    void WriteIndent(bool oneLess = false);
};

const char* const VmaJsonWriter::INDENT = "  ";

VmaJsonWriter::VmaJsonWriter(const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
    m_SB(sb),
    m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
    m_InsideString(false)
{
}

VmaJsonWriter::~VmaJsonWriter()
{
    VMA_ASSERT(!m_InsideString);
    VMA_ASSERT(m_Stack.empty());
}

void VmaJsonWriter::BeginObject(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('{');

    StackItem item;
    item.type = COLLECTION_TYPE_OBJECT;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndObject()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add('}');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
    m_Stack.pop_back();
}

void VmaJsonWriter::BeginArray(bool singleLine)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(false);
    m_SB.Add('[');

    StackItem item;
    item.type = COLLECTION_TYPE_ARRAY;
    item.valueCount = 0;
    item.singleLineMode = singleLine;
    m_Stack.push_back(item);
}

void VmaJsonWriter::EndArray()
{
    VMA_ASSERT(!m_InsideString);

    WriteIndent(true);
    m_SB.Add(']');

    VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
    m_Stack.pop_back();
}

void VmaJsonWriter::WriteString(const char* pStr)
{
    BeginString(pStr);
    EndString();
}

void VmaJsonWriter::BeginString(const char* pStr)
{
    VMA_ASSERT(!m_InsideString);

    BeginValue(true);
    m_SB.Add('"');
    m_InsideString = true;
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
}

void VmaJsonWriter::ContinueString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);

    const size_t strLen = strlen(pStr);
    for(size_t i = 0; i < strLen; ++i)
    {
        char ch = pStr[i];
        if(ch == '\\')
        {
            m_SB.Add("\\\\");
        }
        else if(ch == '"')
        {
            m_SB.Add("\\\"");
        }
        else if(ch >= 32)
        {
            m_SB.Add(ch);
        }
        else switch(ch)
        {
        case '\b':
            m_SB.Add("\\b");
            break;
        case '\f':
            m_SB.Add("\\f");
            break;
        case '\n':
            m_SB.Add("\\n");
            break;
        case '\r':
            m_SB.Add("\\r");
            break;
        case '\t':
            m_SB.Add("\\t");
            break;
        default:
            VMA_ASSERT(0 && "Character not currently supported.");
            break;
        }
    }
}

void VmaJsonWriter::ContinueString(uint32_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString(uint64_t n)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::ContinueString_Pointer(const void* ptr)
{
    VMA_ASSERT(m_InsideString);
    m_SB.AddPointer(ptr);
}

void VmaJsonWriter::EndString(const char* pStr)
{
    VMA_ASSERT(m_InsideString);
    if(pStr != VMA_NULL && pStr[0] != '\0')
    {
        ContinueString(pStr);
    }
    m_SB.Add('"');
    m_InsideString = false;
}

void VmaJsonWriter::WriteNumber(uint32_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteNumber(uint64_t n)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.AddNumber(n);
}

void VmaJsonWriter::WriteBool(bool b)
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add(b ? "true" : "false");
}

void VmaJsonWriter::WriteNull()
{
    VMA_ASSERT(!m_InsideString);
    BeginValue(false);
    m_SB.Add("null");
}

void VmaJsonWriter::BeginValue(bool isString)
{
    if(!m_Stack.empty())
    {
        StackItem& currItem = m_Stack.back();
        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 == 0)
        {
            VMA_ASSERT(isString);
        }

        if(currItem.type == COLLECTION_TYPE_OBJECT &&
            currItem.valueCount % 2 != 0)
        {
            m_SB.Add(": ");
        }
        else if(currItem.valueCount > 0)
        {
            m_SB.Add(", ");
            WriteIndent();
        }
        else
        {
            WriteIndent();
        }
        ++currItem.valueCount;
    }
}

void VmaJsonWriter::WriteIndent(bool oneLess)
{
    if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
    {
        m_SB.AddNewLine();

        size_t count = m_Stack.size();
        if(count > 0 && oneLess)
        {
            --count;
        }
        for(size_t i = 0; i < count; ++i)
        {
            m_SB.Add(INDENT);
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
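#if VMA_STATS_STRING_ENABLED
// Usage sketch (illustrative, assuming a valid VmaAllocator): VmaJsonWriter
// pairs with VmaStringBuilder and validates nesting with asserts; keys and
// values alternate inside an object. This emits {"Name": "example", "Count": 3}.
static inline void VmaJsonWriterUsageSketch(VmaAllocator hAllocator)
{
    VmaStringBuilder sb(hAllocator);
    {
        VmaJsonWriter json(hAllocator->GetAllocationCallbacks(), sb);
        json.BeginObject();
        json.WriteString("Name");
        json.WriteString("example");
        json.WriteString("Count");
        json.WriteNumber(3u);
        json.EndObject();
    } // ~VmaJsonWriter() asserts that the stack is empty here.
    // sb.GetData()/sb.GetLength() now hold the JSON text (not null-terminated).
}
#endif // #if VMA_STATS_STRING_ENABLED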
void VmaAllocation_T::SetUserData(VmaAllocator hAllocator, void* pUserData)
{
    if(IsUserDataString())
    {
        VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);

        FreeUserDataString(hAllocator);

        if(pUserData != VMA_NULL)
        {
            m_pUserData = VmaCreateStringCopy(hAllocator->GetAllocationCallbacks(), (const char*)pUserData);
        }
    }
    else
    {
        m_pUserData = pUserData;
    }
}
void VmaAllocation_T::ChangeBlockAllocation(
    VmaAllocator hAllocator,
    VmaDeviceMemoryBlock* block,
    VkDeviceSize offset)
{
    VMA_ASSERT(block != VMA_NULL);
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);

    // Move mapping reference counter from old block to new block.
    if(block != m_BlockAllocation.m_Block)
    {
        uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
        if(IsPersistentMap())
            ++mapRefCount;
        m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
        block->Map(hAllocator, mapRefCount, VMA_NULL);
    }

    m_BlockAllocation.m_Block = block;
    m_BlockAllocation.m_Offset = offset;
}

void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
{
    VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
    m_BlockAllocation.m_Offset = newOffset;
}
VkDeviceSize VmaAllocation_T::GetOffset() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Offset;
    case ALLOCATION_TYPE_DEDICATED:
        return 0;
    default:
        VMA_ASSERT(0);
        return 0;
    }
}

VkDeviceMemory VmaAllocation_T::GetMemory() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_Block->GetDeviceMemory();
    case ALLOCATION_TYPE_DEDICATED:
        return m_DedicatedAllocation.m_hMemory;
    default:
        VMA_ASSERT(0);
        return VK_NULL_HANDLE;
    }
}

void* VmaAllocation_T::GetMappedData() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        if(m_MapCount != 0)
        {
            void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
            VMA_ASSERT(pBlockData != VMA_NULL);
            return (char*)pBlockData + m_BlockAllocation.m_Offset;
        }
        else
        {
            return VMA_NULL;
        }
    case ALLOCATION_TYPE_DEDICATED:
        VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
        return m_DedicatedAllocation.m_pMappedData;
    default:
        VMA_ASSERT(0);
        return VMA_NULL;
    }
}
bool VmaAllocation_T::CanBecomeLost() const
{
    switch(m_Type)
    {
    case ALLOCATION_TYPE_BLOCK:
        return m_BlockAllocation.m_CanBecomeLost;
    case ALLOCATION_TYPE_DEDICATED:
        return false;
    default:
        VMA_ASSERT(0);
        return false;
    }
}

bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    VMA_ASSERT(CanBecomeLost());

    /*
    Warning: This is a carefully designed algorithm.
    Do not modify unless you really know what you are doing :)
    */
    uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
    for(;;)
    {
        if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
        {
            VMA_ASSERT(0);
            return false;
        }
        else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
        {
            return false;
        }
        else // Last use time earlier than current time.
        {
            if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
            {
                // Setting LastUseFrameIndex to VMA_FRAME_INDEX_LOST is enough to mark it as LOST.
                // Calling code just needs to unregister this allocation in owning VmaDeviceMemoryBlock.
                return true;
            }
        }
    }
}
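// Illustrative sketch (not from the library): MakeLost() above is a classic
// atomic compare-exchange retry loop. A simplified standalone equivalent over
// std::atomic shows the shape: bail out if the value is already lost or still
// potentially in use, otherwise retry the CAS until it succeeds.
#include <atomic>
static inline bool VmaMakeLostSketch(std::atomic<uint32_t>& lastUseFrameIndex,
    uint32_t currentFrameIndex, uint32_t frameInUseCount, uint32_t frameIndexLost)
{
    uint32_t observed = lastUseFrameIndex.load();
    for(;;)
    {
        if(observed == frameIndexLost)
        {
            return false; // already marked lost
        }
        if(observed + frameInUseCount >= currentFrameIndex)
        {
            return false; // may still be in use within the frame window
        }
        // On failure, 'observed' is refreshed with the current value and we retry.
        if(lastUseFrameIndex.compare_exchange_weak(observed, frameIndexLost))
        {
            return true;
        }
    }
}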
#if VMA_STATS_STRING_ENABLED

// Correspond to values of enum VmaSuballocationType.
static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
    "FREE",
    "UNKNOWN",
    "BUFFER",
    "IMAGE_UNKNOWN",
    "IMAGE_LINEAR",
    "IMAGE_OPTIMAL",
};

void VmaAllocation_T::PrintParameters(class VmaJsonWriter& json) const
{
    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);

    json.WriteString("Size");
    json.WriteNumber(m_Size);

    if(m_pUserData != VMA_NULL)
    {
        json.WriteString("UserData");
        if(IsUserDataString())
        {
            json.WriteString((const char*)m_pUserData);
        }
        else
        {
            json.BeginString();
            json.ContinueString_Pointer(m_pUserData);
            json.EndString();
        }
    }

    json.WriteString("CreationFrameIndex");
    json.WriteNumber(m_CreationFrameIndex);

    json.WriteString("LastUseFrameIndex");
    json.WriteNumber(GetLastUseFrameIndex());

    if(m_BufferImageUsage != 0)
    {
        json.WriteString("Usage");
        json.WriteNumber(m_BufferImageUsage);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
{
    VMA_ASSERT(IsUserDataString());
    VmaFreeString(hAllocator->GetAllocationCallbacks(), (char*)m_pUserData);
    m_pUserData = VMA_NULL;
}
void VmaAllocation_T::BlockAllocMap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
    {
        ++m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Allocation mapped too many times simultaneously.");
    }
}

void VmaAllocation_T::BlockAllocUnmap()
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping allocation not previously mapped.");
    }
}
VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator, void** ppData)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if(m_MapCount != 0)
    {
        if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
        {
            VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
            *ppData = m_DedicatedAllocation.m_pMappedData;
            ++m_MapCount;
            return VK_SUCCESS;
        }
        else
        {
            VMA_ASSERT(0 && "Dedicated allocation mapped too many times simultaneously.");
            return VK_ERROR_MEMORY_MAP_FAILED;
        }
    }
    else
    {
        VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
            hAllocator->m_hDevice,
            m_DedicatedAllocation.m_hMemory,
            0, // offset
            VK_WHOLE_SIZE,
            0, // flags
            ppData);
        if(result == VK_SUCCESS)
        {
            m_DedicatedAllocation.m_pMappedData = *ppData;
            m_MapCount = 1;
        }
        return result;
    }
}

void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
{
    VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);

    if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
    {
        --m_MapCount;
        if(m_MapCount == 0)
        {
            m_DedicatedAllocation.m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
                hAllocator->m_hDevice,
                m_DedicatedAllocation.m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "Unmapping dedicated allocation not previously mapped.");
    }
}
#if VMA_STATS_STRING_ENABLED

static void VmaPrintStatInfo(VmaJsonWriter& json, const VmaStatInfo& stat)
{
    json.BeginObject();

    json.WriteString("Blocks");
    json.WriteNumber(stat.blockCount);

    json.WriteString("Allocations");
    json.WriteNumber(stat.allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber(stat.unusedRangeCount);

    json.WriteString("UsedBytes");
    json.WriteNumber(stat.usedBytes);

    json.WriteString("UnusedBytes");
    json.WriteNumber(stat.unusedBytes);

    if(stat.allocationCount > 1)
    {
        json.WriteString("AllocationSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.allocationSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.allocationSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.allocationSizeMax);
        json.EndObject();
    }

    if(stat.unusedRangeCount > 1)
    {
        json.WriteString("UnusedRangeSize");
        json.BeginObject(true);
        json.WriteString("Min");
        json.WriteNumber(stat.unusedRangeSizeMin);
        json.WriteString("Avg");
        json.WriteNumber(stat.unusedRangeSizeAvg);
        json.WriteString("Max");
        json.WriteNumber(stat.unusedRangeSizeMax);
        json.EndObject();
    }

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
struct VmaSuballocationItemSizeLess
{
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        const VmaSuballocationList::iterator rhs) const
    {
        return lhs->size < rhs->size;
    }
    bool operator()(
        const VmaSuballocationList::iterator lhs,
        VkDeviceSize rhsSize) const
    {
        return lhs->size < rhsSize;
    }
};
VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
    m_Size(0),
    m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
{
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata::PrintDetailedMap_Begin(class VmaJsonWriter& json,
    VkDeviceSize unusedBytes,
    size_t allocationCount,
    size_t unusedRangeCount) const
{
    json.BeginObject();

    json.WriteString("TotalBytes");
    json.WriteNumber(GetSize());

    json.WriteString("UnusedBytes");
    json.WriteNumber(unusedBytes);

    json.WriteString("Allocations");
    json.WriteNumber((uint64_t)allocationCount);

    json.WriteString("UnusedRanges");
    json.WriteNumber((uint64_t)unusedRangeCount);

    json.WriteString("Suballocations");
    json.BeginArray();
}

void VmaBlockMetadata::PrintDetailedMap_Allocation(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VmaAllocation hAllocation) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    hAllocation->PrintParameters(json);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_UnusedRange(class VmaJsonWriter& json,
    VkDeviceSize offset,
    VkDeviceSize size) const
{
    json.BeginObject(true);

    json.WriteString("Offset");
    json.WriteNumber(offset);

    json.WriteString("Type");
    json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);

    json.WriteString("Size");
    json.WriteNumber(size);

    json.EndObject();
}

void VmaBlockMetadata::PrintDetailedMap_End(class VmaJsonWriter& json) const
{
    json.EndArray();
    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_FreeCount(0),
    m_SumFreeSize(0),
    m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
{
}

VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
{
}

void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_FreeCount = 1;
    m_SumFreeSize = size;

    VmaSuballocation suballoc = {};
    suballoc.offset = 0;
    suballoc.size = size;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
    m_Suballocations.push_back(suballoc);
    VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
    --suballocItem;
    m_FreeSuballocationsBySize.push_back(suballocItem);
}
bool VmaBlockMetadata_Generic::Validate() const
{
    VMA_VALIDATE(!m_Suballocations.empty());

    // Expected offset of new suballocation as calculated from previous ones.
    VkDeviceSize calculatedOffset = 0;
    // Expected number of free suballocations calculated from traversing their list.
    uint32_t calculatedFreeCount = 0;
    // Expected sum size of free suballocations calculated from traversing their list.
    VkDeviceSize calculatedSumFreeSize = 0;
    // Expected number of free suballocations that should be registered in m_FreeSuballocationsBySize.
    size_t freeSuballocationsToRegister = 0;
    // True if previous visited suballocation was free.
    bool prevFree = false;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& subAlloc = *suballocItem;

        // Actual offset of this suballocation doesn't match expected one.
        VMA_VALIDATE(subAlloc.offset == calculatedOffset);

        const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
        // Two adjacent free suballocations are invalid - they should be merged.
        VMA_VALIDATE(!prevFree || !currFree);

        VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));

        if(currFree)
        {
            calculatedSumFreeSize += subAlloc.size;
            ++calculatedFreeCount;
            if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
            {
                ++freeSuballocationsToRegister;
            }

            // Margin required between allocations - every free space must be at least that large.
            VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
        }
        else
        {
            VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
            VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);

            // Margin required between allocations - previous allocation must be free.
            VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
        }

        calculatedOffset += subAlloc.size;
        prevFree = currFree;
    }

    // Number of free suballocations registered in m_FreeSuballocationsBySize doesn't match expected one.
    VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);

    VkDeviceSize lastSize = 0;
    for(size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
    {
        VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];

        // Only free suballocations can be registered in m_FreeSuballocationsBySize.
        VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
        // They must be sorted by size ascending.
        VMA_VALIDATE(suballocItem->size >= lastSize);

        lastSize = suballocItem->size;
    }

    // Check if totals match calculated values.
    VMA_VALIDATE(ValidateFreeSuballocationList());
    VMA_VALIDATE(calculatedOffset == GetSize());
    VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
    VMA_VALIDATE(calculatedFreeCount == m_FreeCount);

    return true;
}
VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax() const
{
    if(!m_FreeSuballocationsBySize.empty())
    {
        return m_FreeSuballocationsBySize.back()->size;
    }
    else
    {
        return 0;
    }
}

bool VmaBlockMetadata_Generic::IsEmpty() const
{
    return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
}
void VmaBlockMetadata_Generic::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    outInfo.blockCount = 1;

    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
    outInfo.allocationCount = rangeCount - m_FreeCount;
    outInfo.unusedRangeCount = m_FreeCount;

    outInfo.unusedBytes = m_SumFreeSize;
    outInfo.usedBytes = GetSize() - outInfo.unusedBytes;

    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        const VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
        }
        else
        {
            outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, suballoc.size);
            outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, suballoc.size);
        }
    }
}

void VmaBlockMetadata_Generic::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const uint32_t rangeCount = (uint32_t)m_Suballocations.size();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize;
    inoutStats.allocationCount += rangeCount - m_FreeCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Generic::PrintDetailedMap(class VmaJsonWriter& json) const
{
    PrintDetailedMap_Begin(json,
        m_SumFreeSize, // unusedBytes
        m_Suballocations.size() - (size_t)m_FreeCount, // allocationCount
        m_FreeCount); // unusedRangeCount

    for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
        suballocItem != m_Suballocations.cend();
        ++suballocItem)
    {
        if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
        }
        else
        {
            PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
        }
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
bool VmaBlockMetadata_Generic::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(!upperAddress);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());

    pAllocationRequest->type = VmaAllocationRequestType::Normal;

    // There is not enough total free space in this block to fulfill the request: Early return.
    if(canMakeOtherLost == false &&
        m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
    {
        return false;
    }

    const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
    if(freeSuballocCount > 0)
    {
        if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Find first free suballocation with size not less than allocSize + 2 * VMA_DEBUG_MARGIN.
            VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
                m_FreeSuballocationsBySize.data(),
                m_FreeSuballocationsBySize.data() + freeSuballocCount,
                allocSize + 2 * VMA_DEBUG_MARGIN,
                VmaSuballocationItemSizeLess());
            size_t index = it - m_FreeSuballocationsBySize.data();
            for(; index < freeSuballocCount; ++index)
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
        {
            for(VmaSuballocationList::iterator it = m_Suballocations.begin();
                it != m_Suballocations.end();
                ++it)
            {
                if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    it,
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = it;
                    return true;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Search starting from biggest suballocations.
            for(size_t index = freeSuballocCount; index--; )
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    m_FreeSuballocationsBySize[index],
                    false, // canMakeOtherLost
                    &pAllocationRequest->offset,
                    &pAllocationRequest->itemsToMakeLostCount,
                    &pAllocationRequest->sumFreeSize,
                    &pAllocationRequest->sumItemSize))
                {
                    pAllocationRequest->item = m_FreeSuballocationsBySize[index];
                    return true;
                }
            }
        }
    }

    if(canMakeOtherLost)
    {
        // Brute-force algorithm. TODO: Come up with something better.
        bool found = false;
        VmaAllocationRequest tmpAllocRequest = {};
        tmpAllocRequest.type = VmaAllocationRequestType::Normal;
        for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
            suballocIt != m_Suballocations.end();
            ++suballocIt)
        {
            if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
                suballocIt->hAllocation->CanBecomeLost())
            {
                if(CheckAllocation(
                    currentFrameIndex, frameInUseCount, bufferImageGranularity,
                    allocSize, allocAlignment, allocType,
                    suballocIt,
                    canMakeOtherLost,
                    &tmpAllocRequest.offset,
                    &tmpAllocRequest.itemsToMakeLostCount,
                    &tmpAllocRequest.sumFreeSize,
                    &tmpAllocRequest.sumItemSize))
                {
                    if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        break;
                    }
                    if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
                    {
                        *pAllocationRequest = tmpAllocRequest;
                        pAllocationRequest->item = suballocIt;
                        found = true;
                    }
                }
            }
        }

        return found;
    }

    return false;
}
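// Worked example (illustrative): the best-fit path above depends on
// m_FreeSuballocationsBySize being sorted by size ascending, so
// VmaBinaryFindFirstNotLess() (a lower_bound-style search) lands on the
// smallest free range that can possibly fit, and larger candidates are tried
// in order after it.
#include <algorithm>
static inline void VmaBestFitSearchSketch()
{
    const VkDeviceSize freeSizes[] = { 16, 64, 256, 1024 }; // sorted ascending
    const VkDeviceSize required = 100;
    const VkDeviceSize* it = std::lower_bound(freeSizes, freeSizes + 4, required);
    VMA_ASSERT(it != freeSizes + 4 && *it == 256); // first candidate tried
    (void)it;
}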
bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);

    while(pAllocationRequest->itemsToMakeLostCount > 0)
    {
        if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
        {
            ++pAllocationRequest->item;
        }
        VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
        VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
        VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
        if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
            --pAllocationRequest->itemsToMakeLostCount;
        }
        else
        {
            return false;
        }
    }

    VMA_HEAVY_ASSERT(Validate());
    VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
    VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);

    return true;
}
uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
            it->hAllocation->CanBecomeLost() &&
            it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
        {
            it = FreeSuballocation(it);
            ++lostAllocationCount;
        }
    }
    return lostAllocationCount;
}
VkResult VmaBlockMetadata_Generic::CheckCorruption(const void* pBlockData)
{
    for(VmaSuballocationList::iterator it = m_Suballocations.begin();
        it != m_Suballocations.end();
        ++it)
    {
        if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
        {
            if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
            {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Generic::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
    VMA_ASSERT(request.item != m_Suballocations.end());
    VmaSuballocation& suballoc = *request.item;
    // Given suballocation is a free block.
    VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
    // Given offset is inside this suballocation.
    VMA_ASSERT(request.offset >= suballoc.offset);
    const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
    VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
    const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;

    // Unregister this free suballocation from m_FreeSuballocationsBySize and update
    // it to become used.
    UnregisterFreeSuballocation(request.item);

    suballoc.offset = request.offset;
    suballoc.size = allocSize;
    suballoc.type = type;
    suballoc.hAllocation = hAllocation;

    // If there are any free bytes remaining at the end, insert new free suballocation after current one.
    if(paddingEnd)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset + allocSize;
        paddingSuballoc.size = paddingEnd;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        VmaSuballocationList::iterator next = request.item;
        ++next;
        const VmaSuballocationList::iterator paddingEndItem =
            m_Suballocations.insert(next, paddingSuballoc);
        RegisterFreeSuballocation(paddingEndItem);
    }

    // If there are any free bytes remaining at the beginning, insert new free suballocation before current one.
    if(paddingBegin)
    {
        VmaSuballocation paddingSuballoc = {};
        paddingSuballoc.offset = request.offset - paddingBegin;
        paddingSuballoc.size = paddingBegin;
        paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
        const VmaSuballocationList::iterator paddingBeginItem =
            m_Suballocations.insert(request.item, paddingSuballoc);
        RegisterFreeSuballocation(paddingBeginItem);
    }

    // Update totals.
    m_FreeCount = m_FreeCount - 1;
    if(paddingBegin > 0)
    {
        ++m_FreeCount;
    }
    if(paddingEnd > 0)
    {
        ++m_FreeCount;
    }
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Generic::Free(const VmaAllocation allocation)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.hAllocation == allocation)
        {
            FreeSuballocation(suballocItem);
            VMA_HEAVY_ASSERT(Validate());
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}

void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
{
    for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
        suballocItem != m_Suballocations.end();
        ++suballocItem)
    {
        VmaSuballocation& suballoc = *suballocItem;
        if(suballoc.offset == offset)
        {
            FreeSuballocation(suballocItem);
            return;
        }
    }
    VMA_ASSERT(0 && "Not found!");
}
bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList() const
{
    VkDeviceSize lastSize = 0;
    for(size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
    {
        const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];

        VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
        VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
        VMA_VALIDATE(it->size >= lastSize);
        lastSize = it->size;
    }
    return true;
}
9025 bool VmaBlockMetadata_Generic::CheckAllocation(
9026 uint32_t currentFrameIndex,
9027 uint32_t frameInUseCount,
9028 VkDeviceSize bufferImageGranularity,
9029 VkDeviceSize allocSize,
9030 VkDeviceSize allocAlignment,
9031 VmaSuballocationType allocType,
9032 VmaSuballocationList::const_iterator suballocItem,
9033 bool canMakeOtherLost,
9034 VkDeviceSize* pOffset,
9035 size_t* itemsToMakeLostCount,
9036 VkDeviceSize* pSumFreeSize,
9037 VkDeviceSize* pSumItemSize)
const
9039 VMA_ASSERT(allocSize > 0);
9040 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9041 VMA_ASSERT(suballocItem != m_Suballocations.cend());
9042 VMA_ASSERT(pOffset != VMA_NULL);
9044 *itemsToMakeLostCount = 0;
9048 if(canMakeOtherLost)
9050 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
9052 *pSumFreeSize = suballocItem->size;
9056 if(suballocItem->hAllocation->CanBecomeLost() &&
9057 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9059 ++*itemsToMakeLostCount;
9060 *pSumItemSize = suballocItem->size;
9069 if(GetSize() - suballocItem->offset < allocSize)
9075 *pOffset = suballocItem->offset;
9078 if(VMA_DEBUG_MARGIN > 0)
9080 *pOffset += VMA_DEBUG_MARGIN;
9084 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
9088 if(bufferImageGranularity > 1)
9090 bool bufferImageGranularityConflict =
false;
9091 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
9092 while(prevSuballocItem != m_Suballocations.cbegin())
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else {
                    break; // Already on previous page.
                }
            }
            if(bufferImageGranularityConflict) {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // If the final *pOffset landed past this suballocation, the caller must retry with another starting item.
        if(*pOffset >= suballocItem->offset + suballocItem->size) {
            return false;
        }

        // Padding at the beginning based on the aligned offset.
        const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;

        // Required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
        if(suballocItem->offset + totalSize > GetSize()) {
            return false;
        }

        // Advance lastSuballocItem until the required size is covered,
        // counting allocations that would have to be made lost along the way.
        VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
        if(totalSize > suballocItem->size) {
            VkDeviceSize remainingSize = totalSize - suballocItem->size;
            while(remainingSize > 0) {
                ++lastSuballocItem;
                if(lastSuballocItem == m_Suballocations.cend()) {
                    return false;
                }
                if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE) {
                    *pSumFreeSize += lastSuballocItem->size;
                }
                else {
                    VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
                    if(lastSuballocItem->hAllocation->CanBecomeLost() &&
                        lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex) {
                        ++*itemsToMakeLostCount;
                        *pSumItemSize += lastSuballocItem->size;
                    }
                    else {
                        return false;
                    }
                }
                remainingSize = (lastSuballocItem->size < remainingSize) ?
                    remainingSize - lastSuballocItem->size : 0;
            }
        }

        // Check following suballocations for buffer-image granularity conflicts.
        // Conflicting allocations must also become lost, or the request fails.
        if(bufferImageGranularity > 1) {
            VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend()) {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) {
                        VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
                        if(nextSuballoc.hAllocation->CanBecomeLost() &&
                            nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex) {
                            ++*itemsToMakeLostCount;
                        }
                        else {
                            return false;
                        }
                    }
                }
                else {
                    break; // Already on next page.
                }
                ++nextSuballocItem;
            }
        }
    }
    else {
        const VmaSuballocation& suballoc = *suballocItem;
        VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        *pSumFreeSize = suballoc.size;

        // This suballocation is too small for the request: early return.
        if(suballoc.size < allocSize) {
            return false;
        }

        // Start at the beginning of this suballocation.
        *pOffset = suballoc.offset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0) {
            *pOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        *pOffset = VmaAlignUp(*pOffset, allocAlignment);

        // Check previous suballocations for buffer-image granularity conflicts; raise alignment if needed.
        if(bufferImageGranularity > 1) {
            bool bufferImageGranularityConflict = false;
            VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
            while(prevSuballocItem != m_Suballocations.cbegin()) {
                --prevSuballocItem;
                const VmaSuballocation& prevSuballoc = *prevSuballocItem;
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else {
                    break; // Already on previous page.
                }
            }
            if(bufferImageGranularityConflict) {
                *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
            }
        }

        // Padding at the beginning based on the aligned offset.
        const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;

        // Required margin at the end.
        const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;

        // Fail if the requested size plus margins does not fit in this suballocation.
        if(paddingBegin + allocSize + requiredEndMargin > suballoc.size) {
            return false;
        }

        // Check following suballocations for buffer-image granularity conflicts; if any, allocation cannot be made here.
        if(bufferImageGranularity > 1) {
            VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
            ++nextSuballocItem;
            while(nextSuballocItem != m_Suballocations.cend()) {
                const VmaSuballocation& nextSuballoc = *nextSuballocItem;
                if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) {
                        return false;
                    }
                }
                else {
                    break; // Already on next page.
                }
                ++nextSuballocItem;
            }
        }
    }

    // All tests passed: success. *pOffset is already filled.
    return true;
}
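// Note on the two helpers below: they keep free ranges in m_Suballocations
// coalesced. A freed item is merged with a free predecessor and/or successor
// in O(1), so the list never contains two adjacent items of type
// VMA_SUBALLOCATION_TYPE_FREE and no separate compaction pass is needed.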
void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item != m_Suballocations.end());
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);

    VmaSuballocationList::iterator nextItem = item;
    ++nextItem;
    VMA_ASSERT(nextItem != m_Suballocations.end());
    VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);

    item->size += nextItem->size;
    --m_FreeCount;
    m_Suballocations.erase(nextItem);
}
VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
{
    // Change this suballocation to be marked as free.
    VmaSuballocation& suballoc = *suballocItem;
    suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
    suballoc.hAllocation = VK_NULL_HANDLE;

    // Update totals.
    ++m_FreeCount;
    m_SumFreeSize += suballoc.size;

    // Merge with previous and/or next suballocation if it's also free.
    bool mergeWithNext = false;
    bool mergeWithPrev = false;

    VmaSuballocationList::iterator nextItem = suballocItem;
    ++nextItem;
    if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)) {
        mergeWithNext = true;
    }

    VmaSuballocationList::iterator prevItem = suballocItem;
    if(suballocItem != m_Suballocations.begin()) {
        --prevItem;
        if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE) {
            mergeWithPrev = true;
        }
    }

    if(mergeWithNext) {
        UnregisterFreeSuballocation(nextItem);
        MergeFreeWithNext(suballocItem);
    }

    if(mergeWithPrev) {
        UnregisterFreeSuballocation(prevItem);
        MergeFreeWithNext(prevItem);
        RegisterFreeSuballocation(prevItem);
        return prevItem;
    }
    else {
        RegisterFreeSuballocation(suballocItem);
        return suballocItem;
    }
}
void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) {
        if(m_FreeSuballocationsBySize.empty()) {
            m_FreeSuballocationsBySize.push_back(item);
        }
        else {
            VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
        }
    }
}
void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
{
    VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(item->size > 0);

    VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());

    if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER) {
        VmaSuballocationList::iterator* const it = VmaBinaryFindFirstNotLess(
            m_FreeSuballocationsBySize.data(),
            m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
            item,
            VmaSuballocationItemSizeLess());
        for(size_t index = it - m_FreeSuballocationsBySize.data();
            index < m_FreeSuballocationsBySize.size();
            ++index) {
            if(m_FreeSuballocationsBySize[index] == item) {
                VmaVectorRemove(m_FreeSuballocationsBySize, index);
                return;
            }
            VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) && "Not found.");
        }
        VMA_ASSERT(0 && "Not found.");
    }
}
bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
    VkDeviceSize bufferImageGranularity,
    VmaSuballocationType& inOutPrevSuballocType) const
{
    if(bufferImageGranularity == 1 || IsEmpty()) {
        return false;
    }

    VkDeviceSize minAlignment = VK_WHOLE_SIZE;
    bool typeConflictFound = false;
    for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
        it != m_Suballocations.cend();
        ++it) {
        const VmaSuballocationType suballocType = it->type;
        if(suballocType != VMA_SUBALLOCATION_TYPE_FREE) {
            minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
            if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType)) {
                typeConflictFound = true;
            }
            inOutPrevSuballocType = suballocType;
        }
    }

    return typeConflictFound || minAlignment >= bufferImageGranularity;
}
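////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Linear implementation
//
// Layout note for the code below: allocations live in two vectors. The 1st
// vector grows from the beginning of the block toward higher addresses. The
// 2nd vector is either empty (SECOND_VECTOR_EMPTY), a stack growing down from
// the end of the block (SECOND_VECTOR_DOUBLE_STACK), or a ring buffer that
// wraps around behind the 1st vector (SECOND_VECTOR_RING_BUFFER).
// m_1stVectorIndex selects which of m_Suballocations0/1 currently acts as the
// 1st vector; freed items become null entries that CleanupAfterFree() trims.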
VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_SumFreeSize(0),
    m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
    m_1stVectorIndex(0),
    m_2ndVectorMode(SECOND_VECTOR_EMPTY),
    m_1stNullItemsBeginCount(0),
    m_1stNullItemsMiddleCount(0),
    m_2ndNullItemsCount(0)
{
}

VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
{
}

void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);
    m_SumFreeSize = size;
}
bool VmaBlockMetadata_Linear::Validate() const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
    VMA_VALIDATE(!suballocations1st.empty() ||
        suballocations2nd.empty() ||
        m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);

    if(!suballocations1st.empty()) {
        // Null items at the beginning should be accounted into m_1stNullItemsBeginCount.
        VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
        // Null item at the end should just be pop_back()-ed.
        VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
    }
    if(!suballocations2nd.empty()) {
        // Null item at the end should just be pop_back()-ed.
        VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
    }

    VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
    VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());

    VkDeviceSize sumUsedSize = 0;
    const size_t suballoc1stCount = suballocations1st.size();
    VkDeviceSize offset = VMA_DEBUG_MARGIN;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = 0; i < suballoc2ndCount; ++i) {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree) {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    for(size_t i = 0; i < m_1stNullItemsBeginCount; ++i) {
        const VmaSuballocation& suballoc = suballocations1st[i];
        VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation == VK_NULL_HANDLE);
    }

    size_t nullItem1stCount = m_1stNullItemsBeginCount;

    for(size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i) {
        const VmaSuballocation& suballoc = suballocations1st[i];
        const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

        VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
        VMA_VALIDATE(suballoc.offset >= offset);
        VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);

        if(!currFree) {
            VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
            VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
            sumUsedSize += suballoc.size;
        }
        else {
            ++nullItem1stCount;
        }

        offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
    }
    VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        const size_t suballoc2ndCount = suballocations2nd.size();
        size_t nullItem2ndCount = 0;
        for(size_t i = suballoc2ndCount; i--; ) {
            const VmaSuballocation& suballoc = suballocations2nd[i];
            const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);

            VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
            VMA_VALIDATE(suballoc.offset >= offset);

            if(!currFree) {
                VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
                VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
                sumUsedSize += suballoc.size;
            }
            else {
                ++nullItem2ndCount;
            }

            offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
        }

        VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
    }

    VMA_VALIDATE(offset <= GetSize());
    VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);

    return true;
}
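// Null (freed) items are kept inside the vectors until cleanup, so the
// allocation count below subtracts them from the raw vector sizes.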
size_t VmaBlockMetadata_Linear::GetAllocationCount() const
{
    return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
        AccessSuballocations2nd().size() - m_2ndNullItemsCount;
}

VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax() const
{
    const VkDeviceSize size = GetSize();

    // Gaps left by freed allocations inside the vectors are not considered
    // because the linear allocator cannot reuse them; only space available for
    // new allocations counts.
    if(IsEmpty()) {
        return size;
    }

    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();

    switch(m_2ndVectorMode) {
    case SECOND_VECTOR_EMPTY:
        // Available space is after end of 1st, as well as before first suballocation in 1st.
        {
            const size_t suballocations1stCount = suballocations1st.size();
            VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
            const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
            const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
            return VMA_MAX(
                firstSuballoc.offset,
                size - (lastSuballoc.offset + lastSuballoc.size));
        }

    case SECOND_VECTOR_RING_BUFFER:
        // Available space is only between end of 2nd and beginning of 1st.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
            return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
        }

    case SECOND_VECTOR_DOUBLE_STACK:
        // Available space is only between end of 1st and top of 2nd.
        {
            const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
            const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
            return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
        }

    default:
        VMA_ASSERT(0);
        return 0;
    }
}
void VmaBlockMetadata_Linear::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    outInfo.blockCount = 1;
    outInfo.allocationCount = (uint32_t)GetAllocationCount();
    outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = 0;
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.allocationSizeMax = 0;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMax = 0;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // Process free space before this allocation.
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                // Process this allocation, prepare for next iteration.
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                // We are at the end; process remaining free space.
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            outInfo.usedBytes += suballoc.size;
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < freeSpace1stTo2ndEnd) {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                ++outInfo.unusedRangeCount;
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                outInfo.usedBytes += suballoc.size;
                outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, suballoc.size);
                outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, suballoc.size);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    ++outInfo.unusedRangeCount;
                    outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
                    outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    outInfo.unusedBytes = size - outInfo.usedBytes;
}
void VmaBlockMetadata_Linear::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const VkDeviceSize size = GetSize();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    inoutStats.size += size;

    VkDeviceSize lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            // Find next non-null allocation or move nextAlloc2ndIndex to the end.
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                // Process free space before this allocation.
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                // Process this allocation, prepare for next iteration.
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                // We are at the end; process remaining free space.
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            ++inoutStats.allocationCount;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < freeSpace1stTo2ndEnd) {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                inoutStats.unusedSize += unusedRangeSize;
                ++inoutStats.unusedRangeCount;
                inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                ++inoutStats.allocationCount;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    inoutStats.unusedSize += unusedRangeSize;
                    ++inoutStats.unusedRangeCount;
                    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Linear::PrintDetailedMap(class VmaJsonWriter& json) const
{
    const VkDeviceSize size = GetSize();
    const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    const size_t suballoc1stCount = suballocations1st.size();
    const size_t suballoc2ndCount = suballocations2nd.size();

    // FIRST PASS: count allocations, unused ranges and used bytes.
    size_t unusedRangeCount = 0;
    VkDeviceSize usedBytes = 0;

    VkDeviceSize lastOffset = 0;

    size_t alloc2ndCount = 0;
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    ++unusedRangeCount;
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
    size_t alloc1stCount = 0;
    const VkDeviceSize freeSpace1stTo2ndEnd =
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                ++unusedRangeCount;
            }
            ++alloc1stCount;
            usedBytes += suballoc.size;
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < size) {
                ++unusedRangeCount;
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    ++unusedRangeCount;
                }
                ++alloc2ndCount;
                usedBytes += suballoc.size;
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    ++unusedRangeCount;
                }
                lastOffset = size;
            }
        }
    }

    const VkDeviceSize unusedBytes = size - usedBytes;
    PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);

    // SECOND PASS: write the actual entries.
    lastOffset = 0;

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
        size_t nextAlloc2ndIndex = 0;
        while(lastOffset < freeSpace2ndTo1stEnd) {
            while(nextAlloc2ndIndex < suballoc2ndCount &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                ++nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex < suballoc2ndCount) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                ++nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < freeSpace2ndTo1stEnd) {
                    const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = freeSpace2ndTo1stEnd;
            }
        }
    }

    nextAlloc1stIndex = m_1stNullItemsBeginCount;
    while(lastOffset < freeSpace1stTo2ndEnd) {
        while(nextAlloc1stIndex < suballoc1stCount &&
            suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE) {
            ++nextAlloc1stIndex;
        }
        if(nextAlloc1stIndex < suballoc1stCount) {
            const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
            if(lastOffset < suballoc.offset) {
                const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
            lastOffset = suballoc.offset + suballoc.size;
            ++nextAlloc1stIndex;
        }
        else {
            if(lastOffset < freeSpace1stTo2ndEnd) {
                const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
                PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
            }
            lastOffset = freeSpace1stTo2ndEnd;
        }
    }

    if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
        while(lastOffset < size) {
            while(nextAlloc2ndIndex != SIZE_MAX &&
                suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE) {
                --nextAlloc2ndIndex;
            }
            if(nextAlloc2ndIndex != SIZE_MAX) {
                const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
                if(lastOffset < suballoc.offset) {
                    const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
                lastOffset = suballoc.offset + suballoc.size;
                --nextAlloc2ndIndex;
            }
            else {
                if(lastOffset < size) {
                    const VkDeviceSize unusedRangeSize = size - lastOffset;
                    PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
                }
                lastOffset = size;
            }
        }
    }

    PrintDetailedMap_End(json);
}
#endif // #if VMA_STATS_STRING_ENABLED
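// Allocation requests are dispatched on VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT:
// upper-address requests grow the 2nd vector down from the end of the block
// (double stack), while lower-address requests append to the end of the 1st
// vector or, when that space is exhausted, wrap around behind it and turn the
// 2nd vector into a ring buffer.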
bool VmaBlockMetadata_Linear::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(allocSize > 0);
    VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
    VMA_ASSERT(pAllocationRequest != VMA_NULL);
    VMA_HEAVY_ASSERT(Validate());
    return upperAddress ?
        CreateAllocationRequest_UpperAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
        CreateAllocationRequest_LowerAddress(
            currentFrameIndex, frameInUseCount, bufferImageGranularity,
            allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        VMA_ASSERT(0 && "Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
        return false;
    }

    // Try to allocate before 2nd.back(), or at the end of the block if 2nd is empty.
    if(allocSize > size) {
        return false;
    }
    VkDeviceSize resultBaseOffset = size - allocSize;
    if(!suballocations2nd.empty()) {
        const VmaSuballocation& lastSuballoc = suballocations2nd.back();
        resultBaseOffset = lastSuballoc.offset - allocSize;
        if(allocSize > lastSuballoc.offset) {
            return false;
        }
    }

    // Start from offset equal to end of free space.
    VkDeviceSize resultOffset = resultBaseOffset;

    // Apply VMA_DEBUG_MARGIN at the end.
    if(VMA_DEBUG_MARGIN > 0) {
        if(resultOffset < VMA_DEBUG_MARGIN) {
            return false;
        }
        resultOffset -= VMA_DEBUG_MARGIN;
    }

    // Apply alignment.
    resultOffset = VmaAlignDown(resultOffset, allocAlignment);

    // Check next suballocations from 2nd for buffer-image granularity conflicts; raise alignment if needed.
    if(bufferImageGranularity > 1 && !suballocations2nd.empty()) {
        bool bufferImageGranularityConflict = false;
        for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) {
            const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
            if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType)) {
                    bufferImageGranularityConflict = true;
                    break;
                }
            }
            else {
                break; // Already on previous page.
            }
        }
        if(bufferImageGranularityConflict) {
            resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
        }
    }

    // There is enough free space.
    const VkDeviceSize endOf1st = !suballocations1st.empty() ?
        suballocations1st.back().offset + suballocations1st.back().size :
        0;
    if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset) {
        // Check previous suballocations for buffer-image granularity conflicts; if any, allocation cannot be made here.
        if(bufferImageGranularity > 1) {
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type)) {
                        return false;
                    }
                }
                else {
                    break; // Already on next page.
                }
            }
        }

        // All tests passed: success.
        pAllocationRequest->offset = resultOffset;
        pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
        pAllocationRequest->sumItemSize = 0;
        // pAllocationRequest->item, customData unused.
        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
        return true;
    }

    return false;
}
bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    const VkDeviceSize size = GetSize();
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        // Try to allocate at the end of 1st vector.

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations1st.empty()) {
            const VmaSuballocation& lastSuballoc = suballocations1st.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0) {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for buffer-image granularity conflicts; raise alignment if needed.
        if(bufferImageGranularity > 1 && !suballocations1st.empty()) {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; ) {
                const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else {
                    break; // Already on previous page.
                }
            }
            if(bufferImageGranularityConflict) {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
            suballocations2nd.back().offset : size;

        // There is enough free space at the end after alignment.
        if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd) {
            // Check next suballocations in 2nd for buffer-image granularity conflicts; if any, allocation cannot be made here.
            if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
                for(size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; ) {
                    const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) {
                            return false;
                        }
                    }
                    else {
                        break; // Already on previous page.
                    }
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
            pAllocationRequest->sumItemSize = 0;
            // pAllocationRequest->item, customData unused.
            pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
            pAllocationRequest->itemsToMakeLostCount = 0;
            return true;
        }
    }

    // Wrap-around to end of 2nd vector. Try to allocate there, watching for the
    // beginning of 1st vector growing and the aligned end of 2nd vector.
    if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
        VMA_ASSERT(!suballocations1st.empty());

        VkDeviceSize resultBaseOffset = 0;
        if(!suballocations2nd.empty()) {
            const VmaSuballocation& lastSuballoc = suballocations2nd.back();
            resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
        }

        // Start from offset equal to beginning of free space.
        VkDeviceSize resultOffset = resultBaseOffset;

        // Apply VMA_DEBUG_MARGIN at the beginning.
        if(VMA_DEBUG_MARGIN > 0) {
            resultOffset += VMA_DEBUG_MARGIN;
        }

        // Apply alignment.
        resultOffset = VmaAlignUp(resultOffset, allocAlignment);

        // Check previous suballocations for buffer-image granularity conflicts; raise alignment if needed.
        if(bufferImageGranularity > 1 && !suballocations2nd.empty()) {
            bool bufferImageGranularityConflict = false;
            for(size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; ) {
                const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
                if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity)) {
                    if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType)) {
                        bufferImageGranularityConflict = true;
                        break;
                    }
                }
                else {
                    break; // Already on previous page.
                }
            }
            if(bufferImageGranularityConflict) {
                resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
            }
        }

        pAllocationRequest->itemsToMakeLostCount = 0;
        pAllocationRequest->sumItemSize = 0;
        size_t index1st = m_1stNullItemsBeginCount;

        if(canMakeOtherLost) {
            while(index1st < suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset) {
                // Next colliding allocation at the beginning of 1st vector found. Try to make it lost.
                const VmaSuballocation& suballoc = suballocations1st[index1st];
                if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE) {
                    // No problem.
                }
                else {
                    VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
                    if(suballoc.hAllocation->CanBecomeLost() &&
                        suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex) {
                        ++pAllocationRequest->itemsToMakeLostCount;
                        pAllocationRequest->sumItemSize += suballoc.size;
                    }
                    else {
                        return false;
                    }
                }
                ++index1st;
            }

            // Check next suballocations for buffer-image granularity conflicts.
            // If a conflict exists, we must mark more allocations lost or fail.
            if(bufferImageGranularity > 1) {
                while(index1st < suballocations1st.size()) {
                    const VmaSuballocation& suballoc = suballocations1st[index1st];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity)) {
                        if(suballoc.hAllocation != VK_NULL_HANDLE) {
                            if(suballoc.hAllocation->CanBecomeLost() &&
                                suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex) {
                                ++pAllocationRequest->itemsToMakeLostCount;
                                pAllocationRequest->sumItemSize += suballoc.size;
                            }
                            else {
                                return false;
                            }
                        }
                    }
                    else {
                        break; // Already on next page.
                    }
                    ++index1st;
                }
            }

            // Special case: there is not enough room at the end for this allocation, even after making all from the 1st lost.
            if(index1st == suballocations1st.size() &&
                resultOffset + allocSize + VMA_DEBUG_MARGIN > size) {
                VMA_DEBUG_LOG("Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
                return false;
            }
        }

        // There is enough free space at the end after alignment.
        if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
            (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset)) {
            // Check next suballocations for buffer-image granularity conflicts; if any, allocation cannot be made here.
            if(bufferImageGranularity > 1) {
                for(size_t nextSuballocIndex = index1st;
                    nextSuballocIndex < suballocations1st.size();
                    nextSuballocIndex++) {
                    const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
                    if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity)) {
                        if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type)) {
                            return false;
                        }
                    }
                    else {
                        break; // Already on next page.
                    }
                }
            }

            // All tests passed: success.
            pAllocationRequest->offset = resultOffset;
            pAllocationRequest->sumFreeSize =
                (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
                - resultBaseOffset
                - pAllocationRequest->sumItemSize;
            pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
            // pAllocationRequest->item, customData unused.
            return true;
        }
    }

    return false;
}
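// "Lost" allocation support, used by the two functions below: an allocation
// created as one that can become lost is evicted once its last-use frame index
// is more than frameInUseCount frames older than currentFrameIndex. Making it
// lost simply turns its suballocation into a null (FREE) item; the vectors are
// trimmed later by CleanupAfterFree().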
bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    if(pAllocationRequest->itemsToMakeLostCount == 0) {
        return true;
    }

    VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);

    // We always start from 1st.
    SuballocationVectorType* suballocations = &AccessSuballocations1st();
    size_t index = m_1stNullItemsBeginCount;
    size_t madeLostCount = 0;
    while(madeLostCount < pAllocationRequest->itemsToMakeLostCount) {
        if(index == suballocations->size()) {
            index = 0;
            // If we reached the end of 1st, we wrap around to the beginning of 2nd.
            if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
                suballocations = &AccessSuballocations2nd();
            }
            // else: m_2ndVectorMode == SECOND_VECTOR_EMPTY;
            // suballocations continues pointing at AccessSuballocations1st().
            VMA_ASSERT(!suballocations->empty());
        }
        VmaSuballocation& suballoc = (*suballocations)[index];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) {
            VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
            VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
            if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount)) {
                suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
                suballoc.hAllocation = VK_NULL_HANDLE;
                m_SumFreeSize += suballoc.size;
                if(suballocations == &AccessSuballocations1st()) {
                    ++m_1stNullItemsMiddleCount;
                }
                else {
                    ++m_2ndNullItemsCount;
                }
                ++madeLostCount;
            }
            else {
                return false;
            }
        }
        ++index;
    }

    CleanupAfterFree();

    return true;
}
uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    uint32_t lostAllocationCount = 0;

    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) {
        VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount)) {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i) {
        VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
            suballoc.hAllocation->CanBecomeLost() &&
            suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount)) {
            suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            suballoc.hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += suballoc.size;
            ++lostAllocationCount;
        }
    }

    if(lostAllocationCount) {
        CleanupAfterFree();
    }

    return lostAllocationCount;
}
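// Corruption checks rely on the VMA_DEBUG_MARGIN bytes around each allocation
// holding a known magic pattern (written via VmaWriteMagicValue; see
// VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation further below).
// CheckCorruption() re-validates that pattern before and after every live
// suballocation in both vectors.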
VkResult VmaBlockMetadata_Linear::CheckCorruption(const void* pBlockData)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    for(size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i) {
        const VmaSuballocation& suballoc = suballocations1st[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
    for(size_t i = 0, count = suballocations2nd.size(); i < count; ++i) {
        const VmaSuballocation& suballoc = suballocations2nd[i];
        if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE) {
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
            if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size)) {
                VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
                return VK_ERROR_VALIDATION_FAILED_EXT;
            }
        }
    }

    return VK_SUCCESS;
}
void VmaBlockMetadata_Linear::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };

    switch(request.type) {
    case VmaAllocationRequestType::UpperAddress:
        {
            VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
                "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
            suballocations2nd.push_back(newSuballoc);
            m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
        }
        break;
    case VmaAllocationRequestType::EndOf1st:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();

            VMA_ASSERT(suballocations1st.empty() ||
                request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
            // Check if it fits before the end of the block.
            VMA_ASSERT(request.offset + allocSize <= GetSize());

            suballocations1st.push_back(newSuballoc);
        }
        break;
    case VmaAllocationRequestType::EndOf2nd:
        {
            SuballocationVectorType& suballocations1st = AccessSuballocations1st();
            // New allocation at the end of 2-part ring buffer, so before first allocation from 1st vector.
            VMA_ASSERT(!suballocations1st.empty() &&
                request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
            SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

            switch(m_2ndVectorMode) {
            case SECOND_VECTOR_EMPTY:
                // First allocation from second part ring buffer.
                VMA_ASSERT(suballocations2nd.empty());
                m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
                break;
            case SECOND_VECTOR_RING_BUFFER:
                // 2-part ring buffer is already started.
                VMA_ASSERT(!suballocations2nd.empty());
                break;
            case SECOND_VECTOR_DOUBLE_STACK:
                VMA_ASSERT(0 && "CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
                break;
            default:
                VMA_ASSERT(0);
            }

            suballocations2nd.push_back(newSuballoc);
        }
        break;
    default:
        VMA_ASSERT(0 && "CRITICAL INTERNAL ERROR.");
    }

    m_SumFreeSize -= newSuballoc.size;
}
void VmaBlockMetadata_Linear::Free(const VmaAllocation allocation)
{
    FreeAtOffset(allocation->GetOffset());
}

void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(!suballocations1st.empty()) {
        // First allocation: mark it as the next empty item at the beginning.
        VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
        if(firstSuballoc.offset == offset) {
            firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
            firstSuballoc.hAllocation = VK_NULL_HANDLE;
            m_SumFreeSize += firstSuballoc.size;
            ++m_1stNullItemsBeginCount;
            CleanupAfterFree();
            return;
        }
    }

    // Last allocation in 2-part ring buffer or top of upper stack (same logic).
    if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
        m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK) {
        VmaSuballocation& lastSuballoc = suballocations2nd.back();
        if(lastSuballoc.offset == offset) {
            m_SumFreeSize += lastSuballoc.size;
            suballocations2nd.pop_back();
            CleanupAfterFree();
            return;
        }
    }
    // Last allocation in 1st vector.
    else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY) {
        VmaSuballocation& lastSuballoc = suballocations1st.back();
        if(lastSuballoc.offset == offset) {
            m_SumFreeSize += lastSuballoc.size;
            suballocations1st.pop_back();
            CleanupAfterFree();
            return;
        }
    }

    // Item from the middle of 1st vector.
    {
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = VmaBinaryFindSorted(
            suballocations1st.begin() + m_1stNullItemsBeginCount,
            suballocations1st.end(),
            refSuballoc,
            VmaSuballocationOffsetLess());
        if(it != suballocations1st.end()) {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_1stNullItemsMiddleCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    if(m_2ndVectorMode != SECOND_VECTOR_EMPTY) {
        // Item from the middle of 2nd vector.
        VmaSuballocation refSuballoc;
        refSuballoc.offset = offset;
        // Rest of members stays uninitialized intentionally for better performance.
        SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
            VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
        if(it != suballocations2nd.end()) {
            it->type = VMA_SUBALLOCATION_TYPE_FREE;
            it->hAllocation = VK_NULL_HANDLE;
            ++m_2ndNullItemsCount;
            m_SumFreeSize += it->size;
            CleanupAfterFree();
            return;
        }
    }

    VMA_ASSERT(0 && "Allocation to free not found in linear allocator!");
}
bool VmaBlockMetadata_Linear::ShouldCompact1st() const
{
    const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
    const size_t suballocCount = AccessSuballocations1st().size();
    return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
}
void VmaBlockMetadata_Linear::CleanupAfterFree()
{
    SuballocationVectorType& suballocations1st = AccessSuballocations1st();
    SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();

    if(IsEmpty()) {
        suballocations1st.clear();
        suballocations2nd.clear();
        m_1stNullItemsBeginCount = 0;
        m_1stNullItemsMiddleCount = 0;
        m_2ndNullItemsCount = 0;
        m_2ndVectorMode = SECOND_VECTOR_EMPTY;
    }
    else {
        const size_t suballoc1stCount = suballocations1st.size();
        const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
        VMA_ASSERT(nullItem1stCount <= suballoc1stCount);

        // Find more null items at the beginning of 1st vector.
        while(m_1stNullItemsBeginCount < suballoc1stCount &&
            suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE) {
            ++m_1stNullItemsBeginCount;
            --m_1stNullItemsMiddleCount;
        }

        // Find more null items at the end of 1st vector.
        while(m_1stNullItemsMiddleCount > 0 &&
            suballocations1st.back().hAllocation == VK_NULL_HANDLE) {
            --m_1stNullItemsMiddleCount;
            suballocations1st.pop_back();
        }

        // Find more null items at the end of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd.back().hAllocation == VK_NULL_HANDLE) {
            --m_2ndNullItemsCount;
            suballocations2nd.pop_back();
        }

        // Find more null items at the beginning of 2nd vector.
        while(m_2ndNullItemsCount > 0 &&
            suballocations2nd[0].hAllocation == VK_NULL_HANDLE) {
            --m_2ndNullItemsCount;
            VmaVectorRemove(suballocations2nd, 0);
        }

        if(ShouldCompact1st()) {
            const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
            size_t srcIndex = m_1stNullItemsBeginCount;
            for(size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex) {
                while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE) {
                    ++srcIndex;
                }
                if(dstIndex != srcIndex) {
                    suballocations1st[dstIndex] = suballocations1st[srcIndex];
                }
                ++srcIndex;
            }
            suballocations1st.resize(nonNullItemCount);
            m_1stNullItemsBeginCount = 0;
            m_1stNullItemsMiddleCount = 0;
        }

        // 2nd vector became empty.
        if(suballocations2nd.empty()) {
            m_2ndVectorMode = SECOND_VECTOR_EMPTY;
        }

        // 1st vector became empty.
        if(suballocations1st.size() - m_1stNullItemsBeginCount == 0) {
            suballocations1st.clear();
            m_1stNullItemsBeginCount = 0;

            if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER) {
                // Swap 1st with 2nd. Now 2nd is empty.
                m_2ndVectorMode = SECOND_VECTOR_EMPTY;
                m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
                while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
                    suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE) {
                    ++m_1stNullItemsBeginCount;
                    --m_1stNullItemsMiddleCount;
                }
                m_2ndNullItemsCount = 0;
                m_1stVectorIndex ^= 1;
            }
        }
    }

    VMA_HEAVY_ASSERT(Validate());
}
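////////////////////////////////////////////////////////////////////////////////
// class VmaBlockMetadata_Buddy implementation
//
// The code below treats the block as a binary tree over a power-of-two usable
// size (VmaPrevPow2(size); any remainder is reported as unusable). Every node
// is FREE, SPLIT into two half-size "buddy" children, or an ALLOCATION. A
// doubly linked free list per level makes allocation O(log N); FreeAtOffset()
// merges a freed node with its free buddy and repeats up the tree.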
VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(VmaAllocator hAllocator) :
    VmaBlockMetadata(hAllocator),
    m_Root(VMA_NULL),
    m_AllocationCount(0),
    m_FreeCount(1),
    m_SumFreeSize(0)
{
    memset(m_FreeList, 0, sizeof(m_FreeList));
}

VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
{
    DeleteNode(m_Root);
}

void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
{
    VmaBlockMetadata::Init(size);

    m_UsableSize = VmaPrevPow2(size);
    m_SumFreeSize = m_UsableSize;

    // Calculate m_LevelCount.
    m_LevelCount = 1;
    while(m_LevelCount < MAX_LEVELS &&
        LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE) {
        ++m_LevelCount;
    }

    Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
    rootNode->offset = 0;
    rootNode->type = Node::TYPE_FREE;
    rootNode->parent = VMA_NULL;
    rootNode->buddy = VMA_NULL;

    m_Root = rootNode;
    AddToFreeListFront(0, rootNode);
}

bool VmaBlockMetadata_Buddy::Validate() const
{
    // Validate tree.
    ValidationContext ctx;
    if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0))) {
        VMA_VALIDATE(false && "ValidateNode failed.");
    }
    VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
    VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);

    // Validate free node lists.
    for(uint32_t level = 0; level < m_LevelCount; ++level) {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
            m_FreeList[level].front->free.prev == VMA_NULL);

        for(Node* node = m_FreeList[level].front;
            node != VMA_NULL;
            node = node->free.next) {
            VMA_VALIDATE(node->type == Node::TYPE_FREE);

            if(node->free.next == VMA_NULL) {
                VMA_VALIDATE(m_FreeList[level].back == node);
            }
            else {
                VMA_VALIDATE(node->free.next->free.prev == node);
            }
        }
    }

    // Validate that free lists at higher levels are empty.
    for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level) {
        VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
    }

    return true;
}

VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax() const
{
    for(uint32_t level = 0; level < m_LevelCount; ++level) {
        if(m_FreeList[level].front != VMA_NULL) {
            return LevelToNodeSize(level);
        }
    }
    return 0;
}

void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(VmaStatInfo& outInfo) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    outInfo.blockCount = 1;
    outInfo.allocationCount = outInfo.unusedRangeCount = 0;
    outInfo.usedBytes = outInfo.unusedBytes = 0;
    outInfo.allocationSizeMax = outInfo.unusedRangeSizeMax = 0;
    outInfo.allocationSizeMin = outInfo.unusedRangeSizeMin = UINT64_MAX;

    CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));

    if(unusableSize > 0) {
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += unusableSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusableSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusableSize);
    }
}
void VmaBlockMetadata_Buddy::AddPoolStats(VmaPoolStats& inoutStats) const
{
    const VkDeviceSize unusableSize = GetUnusableSize();

    inoutStats.size += GetSize();
    inoutStats.unusedSize += m_SumFreeSize + unusableSize;
    inoutStats.allocationCount += m_AllocationCount;
    inoutStats.unusedRangeCount += m_FreeCount;
    inoutStats.unusedRangeSizeMax = VMA_MAX(inoutStats.unusedRangeSizeMax, GetUnusedRangeSizeMax());

    if(unusableSize > 0) {
        ++inoutStats.unusedRangeCount;
        // Not updating unusedRangeSizeMax with unusableSize because this space is not available for allocations.
    }
}

#if VMA_STATS_STRING_ENABLED

void VmaBlockMetadata_Buddy::PrintDetailedMap(class VmaJsonWriter& json) const
{
    VmaStatInfo stat;
    CalcAllocationStatInfo(stat);

    PrintDetailedMap_Begin(
        json,
        stat.unusedBytes,
        stat.allocationCount,
        stat.unusedRangeCount);

    PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));

    const VkDeviceSize unusableSize = GetUnusableSize();
    if(unusableSize > 0) {
        PrintDetailedMap_UnusedRange(json,
            m_UsableSize, // offset
            unusableSize); // size
    }

    PrintDetailedMap_End(json);
}

#endif // #if VMA_STATS_STRING_ENABLED
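// The request search below starts at the smallest node size that can hold
// allocSize (targetLevel) and walks toward level 0 (the whole block), taking
// the first properly aligned free node; Alloc() then splits that node back
// down to targetLevel.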
bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VkDeviceSize bufferImageGranularity,
    VkDeviceSize allocSize,
    VkDeviceSize allocAlignment,
    bool upperAddress,
    VmaSuballocationType allocType,
    bool canMakeOtherLost,
    uint32_t strategy,
    VmaAllocationRequest* pAllocationRequest)
{
    VMA_ASSERT(!upperAddress && "VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");

    // Simple way to respect bufferImageGranularity: whenever the allocation
    // might be an OPTIMAL image, pad it up to the granularity.
    if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
        allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL) {
        allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
        allocSize = VMA_MAX(allocSize, bufferImageGranularity);
    }

    if(allocSize > m_UsableSize) {
        return false;
    }

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    for(uint32_t level = targetLevel + 1; level--; ) {
        for(Node* freeNode = m_FreeList[level].front;
            freeNode != VMA_NULL;
            freeNode = freeNode->free.next) {
            if(freeNode->offset % allocAlignment == 0) {
                pAllocationRequest->type = VmaAllocationRequestType::Normal;
                pAllocationRequest->offset = freeNode->offset;
                pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
                pAllocationRequest->sumItemSize = 0;
                pAllocationRequest->itemsToMakeLostCount = 0;
                pAllocationRequest->customData = (void*)(uintptr_t)level;
                return true;
            }
        }
    }

    return false;
}

bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
    uint32_t currentFrameIndex,
    uint32_t frameInUseCount,
    VmaAllocationRequest* pAllocationRequest)
{
    // Lost allocations are not supported in the buddy allocator at the moment.
    return pAllocationRequest->itemsToMakeLostCount == 0;
}

uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
{
    // Lost allocations are not supported in the buddy allocator at the moment.
    return 0;
}
void VmaBlockMetadata_Buddy::Alloc(
    const VmaAllocationRequest& request,
    VmaSuballocationType type,
    VkDeviceSize allocSize,
    VmaAllocation hAllocation)
{
    VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);

    const uint32_t targetLevel = AllocSizeToLevel(allocSize);
    uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;

    Node* currNode = m_FreeList[currLevel].front;
    VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    while(currNode->offset != request.offset) {
        currNode = currNode->free.next;
        VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
    }

    // Go down, splitting free nodes.
    while(currLevel < targetLevel) {
        // currNode is already the first free node at currLevel.
        // Remove it from the list of free nodes at this level.
        RemoveFromFreeList(currLevel, currNode);

        const uint32_t childrenLevel = currLevel + 1;

        // Create two free sub-nodes.
        Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
        Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();

        leftChild->offset = currNode->offset;
        leftChild->type = Node::TYPE_FREE;
        leftChild->parent = currNode;
        leftChild->buddy = rightChild;

        rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
        rightChild->type = Node::TYPE_FREE;
        rightChild->parent = currNode;
        rightChild->buddy = leftChild;

        // Convert current node to split type.
        currNode->type = Node::TYPE_SPLIT;
        currNode->split.leftChild = leftChild;

        // Add child nodes to free list. Order is important!
        AddToFreeListFront(childrenLevel, rightChild);
        AddToFreeListFront(childrenLevel, leftChild);

        ++m_FreeCount;
        ++currLevel;
        currNode = m_FreeList[currLevel].front;

        // currNode, as the left child of the node just split, fulfills the
        // alignment requirement as well.
    }

    // Remove from free list.
    VMA_ASSERT(currLevel == targetLevel &&
        currNode != VMA_NULL &&
        currNode->type == Node::TYPE_FREE);
    RemoveFromFreeList(currLevel, currNode);

    // Convert to allocation node.
    currNode->type = Node::TYPE_ALLOCATION;
    currNode->allocation.alloc = hAllocation;

    ++m_AllocationCount;
    --m_FreeCount;
    m_SumFreeSize -= allocSize;
}
void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
{
    if(node->type == Node::TYPE_SPLIT) {
        DeleteNode(node->split.leftChild->buddy);
        DeleteNode(node->split.leftChild);
    }
    vma_delete(GetAllocationCallbacks(), node);
}

bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx, const Node* parent, const Node* curr, uint32_t level, VkDeviceSize levelNodeSize) const
{
    VMA_VALIDATE(level < m_LevelCount);
    VMA_VALIDATE(curr->parent == parent);
    VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
    VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
    switch(curr->type) {
    case Node::TYPE_FREE:
        // curr->free.prev, next are validated separately.
        ctx.calculatedSumFreeSize += levelNodeSize;
        ++ctx.calculatedFreeCount;
        break;
    case Node::TYPE_ALLOCATION:
        ++ctx.calculatedAllocationCount;
        ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
        VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
        break;
    case Node::TYPE_SPLIT:
        {
            const uint32_t childrenLevel = level + 1;
            const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
            const Node* const leftChild = curr->split.leftChild;
            VMA_VALIDATE(leftChild != VMA_NULL);
            VMA_VALIDATE(leftChild->offset == curr->offset);
            if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize)) {
                VMA_VALIDATE(false && "ValidateNode for left child failed.");
            }
            const Node* const rightChild = leftChild->buddy;
            VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
            if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize)) {
                VMA_VALIDATE(false && "ValidateNode for right child failed.");
            }
        }
        break;
    default:
        return false;
    }

    return true;
}
uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize) const
{
    uint32_t level = 0;
    VkDeviceSize currLevelNodeSize = m_UsableSize;
    VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
    while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount) {
        ++level;
        currLevelNodeSize = nextLevelNodeSize;
        nextLevelNodeSize = currLevelNodeSize >> 1;
    }
    return level;
}
void VmaBlockMetadata_Buddy::FreeAtOffset(VmaAllocation alloc, VkDeviceSize offset)
{
    // Find node and level.
    Node* node = m_Root;
    VkDeviceSize nodeOffset = 0;
    uint32_t level = 0;
    VkDeviceSize levelNodeSize = LevelToNodeSize(0);
    while(node->type == Node::TYPE_SPLIT) {
        const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
        if(offset < nodeOffset + nextLevelSize) {
            node = node->split.leftChild;
        }
        else {
            node = node->split.leftChild->buddy;
            nodeOffset += nextLevelSize;
        }
        ++level;
        levelNodeSize = nextLevelSize;
    }

    VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
    VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);

    ++m_FreeCount;
    --m_AllocationCount;
    m_SumFreeSize += alloc->GetSize();

    node->type = Node::TYPE_FREE;

    // Join free nodes with their buddies, as far up the tree as possible.
    while(level > 0 && node->buddy->type == Node::TYPE_FREE) {
        RemoveFromFreeList(level, node->buddy);
        Node* const parent = node->parent;

        vma_delete(GetAllocationCallbacks(), node->buddy);
        vma_delete(GetAllocationCallbacks(), node);
        parent->type = Node::TYPE_FREE;

        node = parent;
        --level;
        --m_FreeCount;
    }

    AddToFreeListFront(level, node);
}
void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(VmaStatInfo& outInfo, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type) {
    case Node::TYPE_FREE:
        ++outInfo.unusedRangeCount;
        outInfo.unusedBytes += levelNodeSize;
        outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, levelNodeSize);
        outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            ++outInfo.allocationCount;
            outInfo.usedBytes += allocSize;
            outInfo.allocationSizeMax = VMA_MAX(outInfo.allocationSizeMax, allocSize);
            outInfo.allocationSizeMin = VMA_MIN(outInfo.allocationSizeMin, allocSize);

            const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
            if(unusedRangeSize > 0) {
                ++outInfo.unusedRangeCount;
                outInfo.unusedBytes += unusedRangeSize;
                outInfo.unusedRangeSizeMax = VMA_MAX(outInfo.unusedRangeSizeMax, unusedRangeSize);
                outInfo.unusedRangeSizeMin = VMA_MIN(outInfo.unusedRangeSizeMin, unusedRangeSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
{
    VMA_ASSERT(node->type == Node::TYPE_FREE);

    // List is empty.
    Node* const frontNode = m_FreeList[level].front;
    if(frontNode == VMA_NULL) {
        VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
        node->free.prev = node->free.next = VMA_NULL;
        m_FreeList[level].front = m_FreeList[level].back = node;
    }
    else {
        VMA_ASSERT(frontNode->free.prev == VMA_NULL);
        node->free.prev = VMA_NULL;
        node->free.next = frontNode;
        frontNode->free.prev = node;
        m_FreeList[level].front = node;
    }
}

void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
{
    VMA_ASSERT(m_FreeList[level].front != VMA_NULL);

    // It is at the front.
    if(node->free.prev == VMA_NULL) {
        VMA_ASSERT(m_FreeList[level].front == node);
        m_FreeList[level].front = node->free.next;
    }
    else {
        Node* const prevFreeNode = node->free.prev;
        VMA_ASSERT(prevFreeNode->free.next == node);
        prevFreeNode->free.next = node->free.next;
    }

    // It is at the back.
    if(node->free.next == VMA_NULL) {
        VMA_ASSERT(m_FreeList[level].back == node);
        m_FreeList[level].back = node->free.prev;
    }
    else {
        Node* const nextFreeNode = node->free.next;
        VMA_ASSERT(nextFreeNode->free.prev == node);
        nextFreeNode->free.prev = node->free.prev;
    }
}
#if VMA_STATS_STRING_ENABLED
void VmaBlockMetadata_Buddy::PrintDetailedMapNode(class VmaJsonWriter& json, const Node* node, VkDeviceSize levelNodeSize) const
{
    switch(node->type) {
    case Node::TYPE_FREE:
        PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
        break;
    case Node::TYPE_ALLOCATION:
        {
            PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
            const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
            if(allocSize < levelNodeSize) {
                PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
            }
        }
        break;
    case Node::TYPE_SPLIT:
        {
            const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
            const Node* const leftChild = node->split.leftChild;
            PrintDetailedMapNode(json, leftChild, childrenNodeSize);
            const Node* const rightChild = leftChild->buddy;
            PrintDetailedMapNode(json, rightChild, childrenNodeSize);
        }
        break;
    default:
        VMA_ASSERT(0);
    }
}
#endif // #if VMA_STATS_STRING_ENABLED
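////////////////////////////////////////////////////////////////////////////////
// class VmaDeviceMemoryBlock implementation
//
// A VmaDeviceMemoryBlock owns one VkDeviceMemory handle plus one of the
// metadata objects above (generic, linear, or buddy, selected by `algorithm`
// in Init()) that subdivides it. Map()/Unmap() below are reference-counted,
// so nested mappings of the same block share a single vkMapMemory call,
// guarded by m_Mutex.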
VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
    m_pMetadata(VMA_NULL),
    m_MemoryTypeIndex(UINT32_MAX),
    m_Id(0),
    m_hMemory(VK_NULL_HANDLE),
    m_MapCount(0),
    m_pMappedData(VMA_NULL)
{
}

void VmaDeviceMemoryBlock::Init(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t newMemoryTypeIndex,
    VkDeviceMemory newMemory,
    VkDeviceSize newSize,
    uint32_t id,
    uint32_t algorithm)
{
    VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);

    m_hParentPool = hParentPool;
    m_MemoryTypeIndex = newMemoryTypeIndex;
    m_Id = id;
    m_hMemory = newMemory;

    switch(algorithm) {
    case VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
        break;
    case VMA_POOL_CREATE_BUDDY_ALGORITHM_BIT:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
        break;
    default:
        VMA_ASSERT(0);
        // Fall-through.
    case 0:
        m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
    }
    m_pMetadata->Init(newSize);
}

void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
{
    // Hitting this assert means some allocations were not freed before
    // destruction of this memory block - a memory leak of VmaAllocation objects.
    VMA_ASSERT(m_pMetadata->IsEmpty() && "Some allocations were not freed before destruction of this memory block!");

    VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
    allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
    m_hMemory = VK_NULL_HANDLE;

    vma_delete(allocator, m_pMetadata);
    m_pMetadata = VMA_NULL;
}
bool VmaDeviceMemoryBlock::Validate() const
{
    VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
        (m_pMetadata->GetSize() != 0));

    return m_pMetadata->Validate();
}
VkResult VmaDeviceMemoryBlock::CheckCorruption(VmaAllocator hAllocator)
{
    void* pData = nullptr;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    res = m_pMetadata->CheckCorruption(pData);

    Unmap(hAllocator, 1);

    return res;
}
VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count, void** ppData)
{
    if(count == 0)
    {
        return VK_SUCCESS;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount != 0)
    {
        // Already mapped - just bump the reference count.
        m_MapCount += count;
        VMA_ASSERT(m_pMappedData != VMA_NULL);
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        return VK_SUCCESS;
    }

    VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
        hAllocator->m_hDevice,
        m_hMemory,
        0, // offset
        VK_WHOLE_SIZE,
        0, // flags
        &m_pMappedData);
    if(result == VK_SUCCESS)
    {
        if(ppData != VMA_NULL)
        {
            *ppData = m_pMappedData;
        }
        m_MapCount = count;
    }
    return result;
}
void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
{
    if(count == 0)
    {
        return;
    }

    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    if(m_MapCount >= count)
    {
        m_MapCount -= count;
        if(m_MapCount == 0)
        {
            m_pMappedData = VMA_NULL;
            (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
        }
    }
    else
    {
        VMA_ASSERT(0 && "VkDeviceMemory block is being unmapped while it was not previously mapped.");
    }
}
VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
    VmaWriteMagicValue(pData, allocOffset + allocSize);

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
    VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
{
    VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
    VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);

    void* pData;
    VkResult res = Map(hAllocator, 1, &pData);
    if(res != VK_SUCCESS)
    {
        return res;
    }

    if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
    }
    else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
    {
        VMA_ASSERT(0 && "MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
    }

    Unmap(hAllocator, 1);

    return VK_SUCCESS;
}
VkResult VmaDeviceMemoryBlock::BindBufferMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
}
VkResult VmaDeviceMemoryBlock::BindImageMemory(
    const VmaAllocator hAllocator,
    const VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
        hAllocation->GetBlock() == this);
    VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
        "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
    const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
    // This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
    VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
    return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
}
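// The allocationLocalOffset parameter exists for the vmaBindBufferMemory2 /
// vmaBindImageMemory2 entry points, where the offset is relative to the
// allocation, not to the whole VkDeviceMemory block. A usage sketch
// (`allocator`, `alloc` and `buf` assumed to exist in the caller):
//
//     // Bind buf at the very beginning of the allocation, no pNext chain:
//     VkResult res = vmaBindBufferMemory2(allocator, alloc, 0, buf, nullptr);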
static void VmaInitStatInfo(VmaStatInfo& outInfo)
{
    memset(&outInfo, 0, sizeof(outInfo));
    outInfo.allocationSizeMin = UINT64_MAX;
    outInfo.unusedRangeSizeMin = UINT64_MAX;
}

// Adds statistics srcInfo into inoutInfo, like: inoutInfo += srcInfo.
static void VmaAddStatInfo(VmaStatInfo& inoutInfo, const VmaStatInfo& srcInfo)
{
    inoutInfo.blockCount += srcInfo.blockCount;
    inoutInfo.allocationCount += srcInfo.allocationCount;
    inoutInfo.unusedRangeCount += srcInfo.unusedRangeCount;
    inoutInfo.usedBytes += srcInfo.usedBytes;
    inoutInfo.unusedBytes += srcInfo.unusedBytes;
    inoutInfo.allocationSizeMin = VMA_MIN(inoutInfo.allocationSizeMin, srcInfo.allocationSizeMin);
    inoutInfo.allocationSizeMax = VMA_MAX(inoutInfo.allocationSizeMax, srcInfo.allocationSizeMax);
    inoutInfo.unusedRangeSizeMin = VMA_MIN(inoutInfo.unusedRangeSizeMin, srcInfo.unusedRangeSizeMin);
    inoutInfo.unusedRangeSizeMax = VMA_MAX(inoutInfo.unusedRangeSizeMax, srcInfo.unusedRangeSizeMax);
}

static void VmaPostprocessCalcStatInfo(VmaStatInfo& inoutInfo)
{
    inoutInfo.allocationSizeAvg = (inoutInfo.allocationCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.usedBytes, inoutInfo.allocationCount) : 0;
    inoutInfo.unusedRangeSizeAvg = (inoutInfo.unusedRangeCount > 0) ?
        VmaRoundDiv<VkDeviceSize>(inoutInfo.unusedBytes, inoutInfo.unusedRangeCount) : 0;
}
VmaPool_T::VmaPool_T(
    VmaAllocator hAllocator,
    const VmaPoolCreateInfo& createInfo,
    VkDeviceSize preferredBlockSize) :
    m_BlockVector(
        hAllocator,
        this, // hParentPool
        createInfo.memoryTypeIndex,
        createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
        createInfo.minBlockCount,
        createInfo.maxBlockCount,
        (createInfo.flags & VMA_POOL_CREATE_IGNORE_BUFFER_IMAGE_GRANULARITY_BIT) != 0 ? 1 : hAllocator->GetBufferImageGranularity(),
        createInfo.frameInUseCount,
        createInfo.blockSize != 0, // explicitBlockSize
        createInfo.flags & VMA_POOL_CREATE_ALGORITHM_MASK), // algorithm
    m_Id(0),
    m_Name(VMA_NULL)
{
}
VmaPool_T::~VmaPool_T()
{
}

void VmaPool_T::SetName(const char* pName)
{
    const VkAllocationCallbacks* allocs = m_BlockVector.GetAllocator()->GetAllocationCallbacks();
    VmaFreeString(allocs, m_Name);

    if(pName != VMA_NULL)
    {
        m_Name = VmaCreateStringCopy(allocs, pName);
    }
    else
    {
        m_Name = VMA_NULL;
    }
}
#if VMA_STATS_STRING_ENABLED

#endif // #if VMA_STATS_STRING_ENABLED
VmaBlockVector::VmaBlockVector(
    VmaAllocator hAllocator,
    VmaPool hParentPool,
    uint32_t memoryTypeIndex,
    VkDeviceSize preferredBlockSize,
    size_t minBlockCount,
    size_t maxBlockCount,
    VkDeviceSize bufferImageGranularity,
    uint32_t frameInUseCount,
    bool explicitBlockSize,
    uint32_t algorithm) :
    m_hAllocator(hAllocator),
    m_hParentPool(hParentPool),
    m_MemoryTypeIndex(memoryTypeIndex),
    m_PreferredBlockSize(preferredBlockSize),
    m_MinBlockCount(minBlockCount),
    m_MaxBlockCount(maxBlockCount),
    m_BufferImageGranularity(bufferImageGranularity),
    m_FrameInUseCount(frameInUseCount),
    m_ExplicitBlockSize(explicitBlockSize),
    m_Algorithm(algorithm),
    m_HasEmptyBlock(false),
    m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
    m_NextBlockId(0)
{
}
VmaBlockVector::~VmaBlockVector()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        m_Blocks[i]->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
VkResult VmaBlockVector::CreateMinBlocks()
{
    for(size_t i = 0; i < m_MinBlockCount; ++i)
    {
        VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::GetPoolStats(VmaPoolStats* pStats)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    const size_t blockCount = m_Blocks.size();

    pStats->size = 0;
    pStats->unusedSize = 0;
    pStats->allocationCount = 0;
    pStats->unusedRangeCount = 0;
    pStats->unusedRangeSizeMax = 0;
    pStats->blockCount = blockCount;

    for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        pBlock->m_pMetadata->AddPoolStats(*pStats);
    }
}
bool VmaBlockVector::IsEmpty()
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    return m_Blocks.empty();
}
bool VmaBlockVector::IsCorruptionDetectionEnabled() const
{
    const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
    return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
        (VMA_DEBUG_MARGIN > 0) &&
        (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
}
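// Corruption detection works by writing a magic value into the VMA_DEBUG_MARGIN
// bytes around every allocation and re-validating it on free, so it is only
// available for HOST_VISIBLE | HOST_COHERENT memory types and only when the
// implementation is compiled with a non-zero margin. A configuration sketch
// (set before including this file in the translation unit that defines the
// implementation):
//
//     #define VMA_DEBUG_MARGIN 16
//     #define VMA_DEBUG_DETECT_CORRUPTION 1
//     #define VMA_IMPLEMENTATION
//     #include "vk_mem_alloc.h"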
static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
VkResult VmaBlockVector::Allocate(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    size_t allocIndex;
    VkResult res = VK_SUCCESS;

    if(IsCorruptionDetectionEnabled())
    {
        size = VmaAlignUp<VkDeviceSize>(size, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
        alignment = VmaAlignUp<VkDeviceSize>(alignment, sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
    }

    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
        for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
        {
            res = AllocatePage(
                currentFrameIndex,
                size,
                alignment,
                createInfo,
                suballocType,
                pAllocations + allocIndex);
            if(res != VK_SUCCESS)
            {
                break;
            }
        }
    }

    if(res != VK_SUCCESS)
    {
        // Free all already created allocations.
        while(allocIndex--)
        {
            Free(pAllocations[allocIndex]);
        }
        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
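// Allocation of multiple pages is all-or-nothing: if any page fails, the ones
// already created are freed and the whole call fails. At the public API level
// this backs vmaAllocateMemoryPages(). A usage sketch (`allocator`, `memReq`
// and `createInfo` assumed to be prepared by the caller):
//
//     VmaAllocation allocs[8];
//     VkResult res = vmaAllocateMemoryPages(
//         allocator, &memReq, &createInfo, 8, allocs, nullptr);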
VkResult VmaBlockVector::AllocatePage(
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    VmaAllocation* pAllocation)
{
    const bool isUpperAddress = (createInfo.flags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    bool canMakeOtherLost = (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) != 0;
    const bool mapped = (createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (createInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VkDeviceSize freeMemory;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        freeMemory = (heapBudget.usage < heapBudget.budget) ? (heapBudget.budget - heapBudget.usage) : 0;
    }

    const bool canFallbackToDedicated = !IsCustomPool();
    const bool canCreateNewBlock =
        ((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0) &&
        (m_Blocks.size() < m_MaxBlockCount) &&
        (freeMemory >= size || !canFallbackToDedicated);
    uint32_t strategy = createInfo.flags & VMA_ALLOCATION_CREATE_STRATEGY_MASK;

    // If the linear algorithm is used, canMakeOtherLost is available only when
    // used as ring buffer, which in turn requires maxBlockCount = 1.
    if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT && m_MaxBlockCount > 1)
    {
        canMakeOtherLost = false;
    }

    // Upper address can only be used with the linear allocator within a single memory block.
    if(isUpperAddress &&
        (m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT || m_MaxBlockCount > 1))
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Validate strategy.
    switch(strategy)
    {
    case 0:
        strategy = VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT;
        break;
    case VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_WORST_FIT_BIT:
    case VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT:
        break;
    default:
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    // Early reject: requested allocation size is larger than the maximum block size for this block vector.
    if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
    {
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(!canMakeOtherLost || canCreateNewBlock)
    {
        // 1. Search existing allocations. Try to allocate without making other allocations lost.
        VmaAllocationCreateFlags allocFlagsCopy = createInfo.flags;
        allocFlagsCopy &= ~VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT;

        if(m_Algorithm == VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
        {
            // Use only last block.
            if(!m_Blocks.empty())
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks.back();
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from last block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
        {
            // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
            for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        else // WORST_FIT, FIRST_FIT
        {
            // Backward order in m_Blocks - prefer blocks with largest amount of free space.
            for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
            {
                VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                VMA_ASSERT(pCurrBlock);
                VkResult res = AllocateFromBlock(pCurrBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Returned from existing block #%u", pCurrBlock->GetId());
                    return VK_SUCCESS;
                }
            }
        }
        // 2. Try to create new block.
        if(canCreateNewBlock)
        {
            // Calculate optimal size for new block.
            VkDeviceSize newBlockSize = m_PreferredBlockSize;
            uint32_t newBlockSizeShift = 0;
            const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;

            if(!m_ExplicitBlockSize)
            {
                // Allocate 1/8, 1/4, 1/2 as first blocks.
                const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
                for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            size_t newBlockIndex = 0;
            VkResult res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
            // Allocation of this size failed? Try 1/2, 1/4, 1/8 of m_PreferredBlockSize.
            if(!m_ExplicitBlockSize)
            {
                while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
                {
                    const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
                    if(smallerNewBlockSize >= size)
                    {
                        newBlockSize = smallerNewBlockSize;
                        ++newBlockSizeShift;
                        res = (newBlockSize <= freeMemory || !canFallbackToDedicated) ?
                            CreateBlock(newBlockSize, &newBlockIndex) : VK_ERROR_OUT_OF_DEVICE_MEMORY;
                    }
                    else
                    {
                        break;
                    }
                }
            }

            if(res == VK_SUCCESS)
            {
                VmaDeviceMemoryBlock* const pBlock = m_Blocks[newBlockIndex];
                VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);

                res = AllocateFromBlock(pBlock, currentFrameIndex, size, alignment,
                    allocFlagsCopy, createInfo.pUserData, suballocType, strategy, pAllocation);
                if(res == VK_SUCCESS)
                {
                    VMA_DEBUG_LOG("    Created new block #%u Size=%llu", pBlock->GetId(), newBlockSize);
                    return VK_SUCCESS;
                }
                else
                {
                    // Allocation from new block failed, possibly due to VMA_DEBUG_MARGIN or alignment.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
    }
    // 3. Try to allocate from existing blocks with making other allocations lost.
    if(canMakeOtherLost)
    {
        uint32_t tryIndex = 0;
        for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
        {
            VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
            VmaAllocationRequest bestRequest = {};
            VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;

            // 1. Search existing allocations.
            if(strategy == VMA_ALLOCATION_CREATE_STRATEGY_BEST_FIT_BIT)
            {
                // Forward order in m_Blocks - prefer blocks with smallest amount of free space.
                for(size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0)
                            {
                                break;
                            }
                        }
                    }
                }
            }
            else // WORST_FIT, FIRST_FIT
            {
                // Backward order in m_Blocks - prefer blocks with largest amount of free space.
                for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
                {
                    VmaDeviceMemoryBlock* const pCurrBlock = m_Blocks[blockIndex];
                    VMA_ASSERT(pCurrBlock);
                    VmaAllocationRequest currRequest = {};
                    if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
                        currentFrameIndex,
                        m_FrameInUseCount,
                        m_BufferImageGranularity,
                        size,
                        alignment,
                        isUpperAddress,
                        suballocType,
                        canMakeOtherLost,
                        strategy,
                        &currRequest))
                    {
                        const VkDeviceSize currRequestCost = currRequest.CalcCost();
                        if(pBestRequestBlock == VMA_NULL ||
                            currRequestCost < bestRequestCost ||
                            strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                        {
                            pBestRequestBlock = pCurrBlock;
                            bestRequest = currRequest;
                            bestRequestCost = currRequestCost;

                            if(bestRequestCost == 0 ||
                                strategy == VMA_ALLOCATION_CREATE_STRATEGY_FIRST_FIT_BIT)
                            {
                                break;
                            }
                        }
                    }
                }
            }

            if(pBestRequestBlock != VMA_NULL)
            {
                if(mapped)
                {
                    VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
                    if(res != VK_SUCCESS)
                    {
                        return res;
                    }
                }

                if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
                    currentFrameIndex,
                    m_FrameInUseCount,
                    &bestRequest))
                {
                    // Allocate from this block.
                    *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
                    pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
                    UpdateHasEmptyBlock();
                    (*pAllocation)->InitBlockAllocation(
                        pBestRequestBlock,
                        bestRequest.offset,
                        alignment,
                        size,
                        m_MemoryTypeIndex,
                        suballocType,
                        mapped,
                        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
                    VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
                    VMA_DEBUG_LOG("    Returned from existing block");
                    (*pAllocation)->SetUserData(m_hAllocator, createInfo.pUserData);
                    m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
                    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                    {
                        m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
                    }
                    if(IsCorruptionDetectionEnabled())
                    {
                        VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
                        VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
                    }
                    return VK_SUCCESS;
                }
                // else: Some allocations marked for making lost were actually
                // used in the meantime - try again.
            }
            else
            {
                // Could not find a place in any of the blocks - break outer loop.
                break;
            }
        }
        if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
        {
            return VK_ERROR_TOO_MANY_OBJECTS;
        }
    }

    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
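// AllocatePage tries, in order: existing block(s) according to the chosen
// strategy, then a brand-new block (progressively halving its size down to the
// requested size on failure), and finally - only when
// VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT is set - evicting "lost"
// allocations, retrying up to VMA_ALLOCATION_TRY_COUNT times before giving up
// with VK_ERROR_TOO_MANY_OBJECTS.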
void VmaBlockVector::Free(const VmaAllocation hAllocation)
{
    VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;

    bool budgetExceeded = false;
    {
        const uint32_t heapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex);
        VmaBudget heapBudget = {};
        m_hAllocator->GetBudget(&heapBudget, heapIndex, 1);
        budgetExceeded = heapBudget.usage >= heapBudget.budget;
    }

    // Scope for lock.
    {
        VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

        VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();

        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to validate magic value.");
        }

        if(hAllocation->IsPersistentMap())
        {
            pBlock->Unmap(m_hAllocator, 1);
        }

        pBlock->m_pMetadata->Free(hAllocation);
        VMA_HEAVY_ASSERT(pBlock->Validate());

        VMA_DEBUG_LOG("  Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);

        const bool canDeleteBlock = m_Blocks.size() > m_MinBlockCount;
        // pBlock became empty after this deallocation.
        if(pBlock->m_pMetadata->IsEmpty())
        {
            // Already had an empty block, or we are over budget - delete this one.
            if((m_HasEmptyBlock || budgetExceeded) && canDeleteBlock)
            {
                pBlockToDelete = pBlock;
                Remove(pBlock);
            }
            // else: We now have one empty block - leave it as a reserve.
        }
        // pBlock didn't become empty, but we have another empty block - find and free that one.
        // (This is optional, heuristics.)
        else if(m_HasEmptyBlock && canDeleteBlock)
        {
            VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
            if(pLastBlock->m_pMetadata->IsEmpty())
            {
                pBlockToDelete = pLastBlock;
                m_Blocks.pop_back();
            }
        }

        UpdateHasEmptyBlock();
        IncrementallySortBlocks();
    }

    // Destruction of an empty block is deferred until this point, outside of
    // the mutex lock, for performance reasons.
    if(pBlockToDelete != VMA_NULL)
    {
        VMA_DEBUG_LOG("    Deleted empty block");
        pBlockToDelete->Destroy(m_hAllocator);
        vma_delete(m_hAllocator, pBlockToDelete);
    }
}
VkDeviceSize VmaBlockVector::CalcMaxBlockSize() const
{
    VkDeviceSize result = 0;
    for(size_t i = m_Blocks.size(); i--; )
    {
        result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
        if(result >= m_PreferredBlockSize)
        {
            break;
        }
    }
    return result;
}
void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
{
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        if(m_Blocks[blockIndex] == pBlock)
        {
            VmaVectorRemove(m_Blocks, blockIndex);
            return;
        }
    }
    VMA_ASSERT(0);
}
void VmaBlockVector::IncrementallySortBlocks()
{
    if(m_Algorithm != VMA_POOL_CREATE_LINEAR_ALGORITHM_BIT)
    {
        // Bubble sort only until first swap.
        for(size_t i = 1; i < m_Blocks.size(); ++i)
        {
            if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
            {
                VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
                return;
            }
        }
    }
}
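// Keeping m_Blocks ordered by ascending sum of free space is what lets the
// best-fit path walk blocks front-to-back and the worst-fit path back-to-front.
// Doing only a single bubble-sort step per call amortizes the cost, since each
// Free() disturbs the order of at most one block.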
VkResult VmaBlockVector::AllocateFromBlock(
    VmaDeviceMemoryBlock* pBlock,
    uint32_t currentFrameIndex,
    VkDeviceSize size,
    VkDeviceSize alignment,
    VmaAllocationCreateFlags allocFlags,
    void* pUserData,
    VmaSuballocationType suballocType,
    uint32_t strategy,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT((allocFlags & VMA_ALLOCATION_CREATE_CAN_MAKE_OTHER_LOST_BIT) == 0);
    const bool isUpperAddress = (allocFlags & VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT) != 0;
    const bool mapped = (allocFlags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0;
    const bool isUserDataString = (allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0;

    VmaAllocationRequest currRequest = {};
    if(pBlock->m_pMetadata->CreateAllocationRequest(
        currentFrameIndex,
        m_FrameInUseCount,
        m_BufferImageGranularity,
        size,
        alignment,
        isUpperAddress,
        suballocType,
        false, // canMakeOtherLost
        strategy,
        &currRequest))
    {
        // Allocate from pBlock. Because canMakeOtherLost is false, this cannot fail.
        VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);

        if(mapped)
        {
            VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
            if(res != VK_SUCCESS)
            {
                return res;
            }
        }

        *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate(currentFrameIndex, isUserDataString);
        pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
        UpdateHasEmptyBlock();
        (*pAllocation)->InitBlockAllocation(
            pBlock,
            currRequest.offset,
            alignment,
            size,
            m_MemoryTypeIndex,
            suballocType,
            mapped,
            (allocFlags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        (*pAllocation)->SetUserData(m_hAllocator, pUserData);
        m_hAllocator->m_Budget.AddAllocation(m_hAllocator->MemoryTypeIndexToHeapIndex(m_MemoryTypeIndex), size);
        if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
        {
            m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
        }
        if(IsCorruptionDetectionEnabled())
        {
            VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
            VMA_ASSERT(res == VK_SUCCESS && "Couldn't map block memory to write magic value.");
        }
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
}
VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize, size_t* pNewBlockIndex)
{
    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
    allocInfo.allocationSize = blockSize;

#if VMA_BUFFER_DEVICE_ADDRESS
    // Every standalone block can potentially contain a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT - always enable the feature.
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_hAllocator->m_UseKhrBufferDeviceAddress)
    {
        allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
        VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    VkDeviceMemory mem = VK_NULL_HANDLE;
    VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
    if(res < 0)
    {
        return res;
    }

    // New VkDeviceMemory successfully created. Create a new block object for it.
    VmaDeviceMemoryBlock* const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
    pBlock->Init(
        m_hAllocator,
        m_hParentPool,
        m_MemoryTypeIndex,
        mem,
        allocInfo.allocationSize,
        m_NextBlockId++,
        m_Algorithm);

    m_Blocks.push_back(pBlock);
    if(pNewBlockIndex != VMA_NULL)
    {
        *pNewBlockIndex = m_Blocks.size() - 1;
    }

    return VK_SUCCESS;
}
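// When VK_KHR_buffer_device_address (or Vulkan 1.2) is in use, the
// VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT flag has to be set on the whole
// VkDeviceMemory block, because any buffer later suballocated from it may be
// created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT. That is why the flag
// is pushed onto the pNext chain of every block allocation here rather than
// decided per buffer.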
void VmaBlockVector::ApplyDefragmentationMovesCpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
{
    const size_t blockCount = m_Blocks.size();
    const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);

    enum BLOCK_FLAG
    {
        BLOCK_FLAG_USED = 0x00000001,
        BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
    };

    struct BlockInfo
    {
        uint32_t flags;
        void* pMappedData;
    };
    VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
        blockInfo(blockCount, BlockInfo(), VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
    memset(blockInfo.data(), 0, blockCount * sizeof(BlockInfo));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
        blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Get mapped pointer or map if necessary.
    for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo& currBlockInfo = blockInfo[blockIndex];
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
        {
            currBlockInfo.pMappedData = pBlock->GetMappedData();
            // It is not originally mapped - map it.
            if(currBlockInfo.pMappedData == VMA_NULL)
            {
                pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
                }
            }
        }
    }

    // Go over all moves. Do actual data transfer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };

        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
            const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];

            VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);

            // Invalidate source.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pSrcBlock = m_Blocks[move.srcBlockIndex];
                memRange.memory = pSrcBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
                    pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }

            // THE PLACE WHERE ACTUAL DATA COPY HAPPENS.
            memcpy(
                reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
                reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
                static_cast<size_t>(move.size));

            if(IsCorruptionDetectionEnabled())
            {
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
                VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
            }

            // Flush destination.
            if(isNonCoherent)
            {
                VmaDeviceMemoryBlock* const pDstBlock = m_Blocks[move.dstBlockIndex];
                memRange.memory = pDstBlock->GetDeviceMemory();
                memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
                    pDstBlock->m_pMetadata->GetSize() - memRange.offset);
                (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
            }
        }
    }

    // Go over all blocks in reverse order. Unmap those that were mapped just for
    // defragmentation. Regardless of pDefragCtx->res.
    for(size_t blockIndex = blockCount; blockIndex--; )
    {
        const BlockInfo& currBlockInfo = blockInfo[blockIndex];
        if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
        {
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            pBlock->Unmap(m_hAllocator, 1);
        }
    }
}
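// The CPU path performs moves with plain memcpy on mapped pointers. For
// non-coherent memory types it brackets each copy with
// vkInvalidateMappedMemoryRanges on the source and vkFlushMappedMemoryRanges
// on the destination, with both ranges aligned to nonCoherentAtomSize as the
// Vulkan spec requires.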
void VmaBlockVector::ApplyDefragmentationMovesGpu(
    class VmaBlockVectorDefragmentationContext* pDefragCtx,
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkCommandBuffer commandBuffer)
{
    const size_t blockCount = m_Blocks.size();

    pDefragCtx->blockContexts.resize(blockCount);
    memset(pDefragCtx->blockContexts.data(), 0, blockCount * sizeof(VmaBlockDefragmentationContext));

    // Go over all moves. Mark blocks that are used with BLOCK_FLAG_USED.
    const size_t moveCount = moves.size();
    for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
    {
        const VmaDefragmentationMove& move = moves[moveIndex];
        pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
        pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
    }

    VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);

    // Go over all blocks. Create and bind a buffer for the whole block if necessary.
    {
        VkBufferCreateInfo bufCreateInfo;
        VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);

        for(size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
        {
            VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
            VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
            if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
            {
                bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
                pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
                    m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
                if(pDefragCtx->res == VK_SUCCESS)
                {
                    pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
                        m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
                }
            }
        }
    }

    // Go over all moves. Post data transfer commands to command buffer.
    if(pDefragCtx->res == VK_SUCCESS)
    {
        for(size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
        {
            const VmaDefragmentationMove& move = moves[moveIndex];

            const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
            const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];

            VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);

            VkBufferCopy region = {
                move.srcOffset,
                move.dstOffset,
                move.size };
            (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
                commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
        }
    }

    // Buffers stay in the defrag context for later destruction. The context is
    // not ready until the submitted command buffer finishes.
    if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
    {
        pDefragCtx->res = VK_NOT_READY;
    }
}

void VmaBlockVector::FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats)
{
    for(size_t blockIndex = m_Blocks.size(); blockIndex--; )
    {
        VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            if(m_Blocks.size() > m_MinBlockCount)
            {
                if(pDefragmentationStats != VMA_NULL)
                {
                    ++pDefragmentationStats->deviceMemoryBlocksFreed;
                    pDefragmentationStats->bytesFreed += pBlock->m_pMetadata->GetSize();
                }

                VmaVectorRemove(m_Blocks, blockIndex);
                pBlock->Destroy(m_hAllocator);
                vma_delete(m_hAllocator, pBlock);
            }
            else
            {
                break;
            }
        }
    }
    UpdateHasEmptyBlock();
}
void VmaBlockVector::UpdateHasEmptyBlock()
{
    m_HasEmptyBlock = false;
    for(size_t index = 0, count = m_Blocks.size(); index < count; ++index)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[index];
        if(pBlock->m_pMetadata->IsEmpty())
        {
            m_HasEmptyBlock = true;
            break;
        }
    }
}
#if VMA_STATS_STRING_ENABLED

void VmaBlockVector::PrintDetailedMap(class VmaJsonWriter& json)
{
    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    json.BeginObject();

    if(IsCustomPool())
    {
        const char* poolName = m_hParentPool->GetName();
        if(poolName != VMA_NULL && poolName[0] != '\0')
        {
            json.WriteString("Name");
            json.WriteString(poolName);
        }

        json.WriteString("MemoryTypeIndex");
        json.WriteNumber(m_MemoryTypeIndex);

        json.WriteString("BlockSize");
        json.WriteNumber(m_PreferredBlockSize);

        json.WriteString("BlockCount");
        json.BeginObject(true);
        if(m_MinBlockCount > 0)
        {
            json.WriteString("Min");
            json.WriteNumber((uint64_t)m_MinBlockCount);
        }
        if(m_MaxBlockCount < SIZE_MAX)
        {
            json.WriteString("Max");
            json.WriteNumber((uint64_t)m_MaxBlockCount);
        }
        json.WriteString("Cur");
        json.WriteNumber((uint64_t)m_Blocks.size());
        json.EndObject();

        if(m_FrameInUseCount > 0)
        {
            json.WriteString("FrameInUseCount");
            json.WriteNumber(m_FrameInUseCount);
        }

        if(m_Algorithm != 0)
        {
            json.WriteString("Algorithm");
            json.WriteString(VmaAlgorithmToStr(m_Algorithm));
        }
    }
    else
    {
        json.WriteString("PreferredBlockSize");
        json.WriteNumber(m_PreferredBlockSize);
    }

    json.WriteString("Blocks");
    json.BeginObject();
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        json.BeginString();
        json.ContinueString(m_Blocks[i]->GetId());
        json.EndString();

        m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
    }
    json.EndObject();

    json.EndObject();
}

#endif // #if VMA_STATS_STRING_ENABLED
void VmaBlockVector::Defragment(
    class VmaBlockVectorDefragmentationContext* pCtx,
    VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags,
    VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
    VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer)
{
    pCtx->res = VK_SUCCESS;

    const VkMemoryPropertyFlags memPropFlags =
        m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
    const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
        isHostVisible;
    const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
        !IsCorruptionDetectionEnabled() &&
        ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;

    // There are options to defragment this memory type.
    if(canDefragmentOnCpu || canDefragmentOnGpu)
    {
        bool defragmentOnGpu;
        // There is only one option to defragment this memory type.
        if(canDefragmentOnGpu != canDefragmentOnCpu)
        {
            defragmentOnGpu = canDefragmentOnGpu;
        }
        // Both options are available: Heuristics to choose the best one.
        else
        {
            defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
                m_hAllocator->IsIntegratedGpu();
        }

        bool overlappingMoveSupported = !defragmentOnGpu;

        if(m_hAllocator->m_UseMutex)
        {
            if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
            {
                if(!m_Mutex.TryLockWrite())
                {
                    pCtx->res = VK_ERROR_INITIALIZATION_FAILED;
                    return;
                }
            }
            else
            {
                m_Mutex.LockWrite();
                pCtx->mutexLocked = true;
            }
        }

        pCtx->Begin(overlappingMoveSupported, flags);

        // Defragment.

        const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
        const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
        pCtx->res = pCtx->GetAlgorithm()->Defragment(pCtx->defragmentationMoves, maxBytesToMove, maxAllocationsToMove, flags);

        // Accumulate statistics.
        if(pStats != VMA_NULL)
        {
            const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
            const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
            pStats->bytesMoved += bytesMoved;
            pStats->allocationsMoved += allocationsMoved;
            VMA_ASSERT(bytesMoved <= maxBytesToMove);
            VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
            if(defragmentOnGpu)
            {
                maxGpuBytesToMove -= bytesMoved;
                maxGpuAllocationsToMove -= allocationsMoved;
            }
            else
            {
                maxCpuBytesToMove -= bytesMoved;
                maxCpuAllocationsToMove -= allocationsMoved;
            }
        }

        if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
        {
            if(m_hAllocator->m_UseMutex)
                m_Mutex.UnlockWrite();

            if(pCtx->res >= VK_SUCCESS && !pCtx->defragmentationMoves.empty())
                pCtx->res = VK_NOT_READY;

            return;
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            if(defragmentOnGpu)
            {
                ApplyDefragmentationMovesGpu(pCtx, pCtx->defragmentationMoves, commandBuffer);
            }
            else
            {
                ApplyDefragmentationMovesCpu(pCtx, pCtx->defragmentationMoves);
            }
        }
    }
}
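// At the public API level this machinery is driven through
// vmaDefragmentationBegin/vmaDefragmentationEnd. A minimal CPU-only usage
// sketch (`allocator`, `allocs` and `allocCount` are assumed to exist in the
// caller; no command buffer is provided, so GPU moves stay disabled):
//
//     VmaDefragmentationInfo2 defragInfo = {};
//     defragInfo.allocationCount = allocCount;
//     defragInfo.pAllocations = allocs;
//     defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
//     defragInfo.maxCpuAllocationsToMove = UINT32_MAX;
//
//     VmaDefragmentationContext defragCtx;
//     vmaDefragmentationBegin(allocator, &defragInfo, nullptr, &defragCtx);
//     vmaDefragmentationEnd(allocator, defragCtx);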
void VmaBlockVector::DefragmentationEnd(
    class VmaBlockVectorDefragmentationContext* pCtx,
    uint32_t flags,
    VmaDefragmentationStats* pStats)
{
    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL && m_hAllocator->m_UseMutex)
    {
        VMA_ASSERT(pCtx->mutexLocked == false);

        // Incremental defragmentation doesn't hold the lock, so when we enter here
        // nothing is protecting us. Since we mutate state below, we have to take
        // the lock out now.
        m_Mutex.LockWrite();
        pCtx->mutexLocked = true;
    }

    // If the mutex isn't locked we didn't do any work and there is nothing to delete.
    if(pCtx->mutexLocked || !m_hAllocator->m_UseMutex)
    {
        // Destroy buffers.
        for(size_t blockIndex = pCtx->blockContexts.size(); blockIndex--;)
        {
            VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
            if(blockCtx.hBuffer)
            {
                (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
            }
        }

        if(pCtx->res >= VK_SUCCESS)
        {
            FreeEmptyBlocks(pStats);
        }
    }

    if(pCtx->mutexLocked)
    {
        VMA_ASSERT(m_hAllocator->m_UseMutex);
        m_Mutex.UnlockWrite();
    }
}
uint32_t VmaBlockVector::ProcessDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationPassMoveInfo* pMove, uint32_t maxMoves)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    const uint32_t moveCount = std::min(uint32_t(pCtx->defragmentationMoves.size()) - pCtx->defragmentationMovesProcessed, maxMoves);

    for(uint32_t i = 0; i < moveCount; ++ i)
    {
        VmaDefragmentationMove& move = pCtx->defragmentationMoves[pCtx->defragmentationMovesProcessed + i];

        pMove->allocation = move.hAllocation;
        pMove->memory = move.pDstBlock->GetDeviceMemory();
        pMove->offset = move.dstOffset;

        ++ pMove;
    }

    pCtx->defragmentationMovesProcessed += moveCount;

    return moveCount;
}
void VmaBlockVector::CommitDefragmentations(
    class VmaBlockVectorDefragmentationContext *pCtx,
    VmaDefragmentationStats* pStats)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t i = pCtx->defragmentationMovesCommitted; i < pCtx->defragmentationMovesProcessed; ++ i)
    {
        const VmaDefragmentationMove &move = pCtx->defragmentationMoves[i];

        move.pSrcBlock->m_pMetadata->FreeAtOffset(move.srcOffset);
        move.hAllocation->ChangeBlockAllocation(m_hAllocator, move.pDstBlock, move.dstOffset);
    }

    pCtx->defragmentationMovesCommitted = pCtx->defragmentationMovesProcessed;
    FreeEmptyBlocks(pStats);
}
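// In the incremental flow, ProcessDefragmentations hands a batch of planned
// moves out to the user as VmaDefragmentationPassMoveInfo entries, the user
// performs the copies and rebinds resources, and CommitDefragmentations then
// retires the source suballocations. A usage sketch of one pass (`allocator`
// and `defragCtx` assumed to exist; the moves array capacity is the caller's
// choice; error handling omitted):
//
//     VmaDefragmentationPassMoveInfo moves[64];
//     VmaDefragmentationPassInfo pass = {};
//     pass.moveCount = 64;
//     pass.pMoves = moves;
//     vmaBeginDefragmentationPass(allocator, defragCtx, &pass);
//     // Copy data / recreate bindings for pass.moveCount moves here...
//     vmaEndDefragmentationPass(allocator, defragCtx);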
size_t VmaBlockVector::CalcAllocationCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
    }
    return result;
}
bool VmaBlockVector::IsBufferImageGranularityConflictPossible() const
{
    if(m_BufferImageGranularity == 1)
    {
        return false;
    }
    VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
    for(size_t i = 0, count = m_Blocks.size(); i < count; ++i)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[i];
        VMA_ASSERT(m_Algorithm == 0);
        VmaBlockMetadata_Generic* const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
        if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
        {
            return true;
        }
    }
    return false;
}
void VmaBlockVector::MakePoolAllocationsLost(
    uint32_t currentFrameIndex,
    size_t* pLostAllocationCount)
{
    VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
    size_t lostAllocationCount = 0;
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
    }
    if(pLostAllocationCount != VMA_NULL)
    {
        *pLostAllocationCount = lostAllocationCount;
    }
}
VkResult VmaBlockVector::CheckCorruption()
{
    if(!IsCorruptionDetectionEnabled())
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VkResult res = pBlock->CheckCorruption(m_hAllocator);
        if(res != VK_SUCCESS)
        {
            return res;
        }
    }
    return VK_SUCCESS;
}
void VmaBlockVector::AddStats(VmaStats* pStats)
{
    const uint32_t memTypeIndex = m_MemoryTypeIndex;
    const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);

    VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);

    for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
    {
        const VmaDeviceMemoryBlock* const pBlock = m_Blocks[blockIndex];
        VMA_ASSERT(pBlock);
        VMA_HEAVY_ASSERT(pBlock->Validate());
        VmaStatInfo allocationStatInfo;
        pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
        VmaAddStatInfo(pStats->total, allocationStatInfo);
        VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
        VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
    }
}
VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
{
    // Create block info for each block.
    const size_t blockCount = m_pBlockVector->m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
        pBlockInfo->m_OriginalBlockIndex = blockIndex;
        pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
        m_Blocks.push_back(pBlockInfo);
    }

    // Sort them by m_pBlock pointer value.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
}
VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
{
    for(size_t i = m_Blocks.size(); i--; )
    {
        vma_delete(m_hAllocator, m_Blocks[i]);
    }
}
void VmaDefragmentationAlgorithm_Generic::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    // Now as we are inside VmaBlockVector::m_Mutex, we can make a final check whether this allocation was not lost.
    if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
    {
        VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
        BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
        if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
        {
            AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
            (*it)->m_Allocations.push_back(allocInfo);
        }
        else
        {
            VMA_ASSERT(0);
        }

        ++m_AllocationCount;
    }
}
VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    bool freeOldAllocations)
{
    if(m_Blocks.empty())
    {
        return VK_SUCCESS;
    }

    // This is a choice based on research.
    const uint32_t strategy = VMA_ALLOCATION_CREATE_STRATEGY_MIN_TIME_BIT;

    const size_t srcBlockMinIndex = 0;
    size_t srcBlockIndex = m_Blocks.size() - 1;
    size_t srcAllocIndex = SIZE_MAX;
    for(;;)
    {
        // 1. Find next allocation to move.
        // 1.1. Start from last to first m_Blocks - they are sorted from most "destination" to most "source".
        // 1.2. Then start from last to first m_Allocations.
        while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
        {
            if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
            {
                // Finished: no more allocations to process.
                if(srcBlockIndex == srcBlockMinIndex)
                {
                    return VK_SUCCESS;
                }
                else
                {
                    --srcBlockIndex;
                    srcAllocIndex = SIZE_MAX;
                }
            }
            else
            {
                srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
            }
        }

        BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
        AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];

        const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
        const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
        const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
        const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();

        // 2. Try to find a new place for this allocation in a preceding or the current block.
        for(size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
        {
            BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
            VmaAllocationRequest dstAllocRequest;
            if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
                m_CurrentFrameIndex,
                m_pBlockVector->GetFrameInUseCount(),
                m_pBlockVector->GetBufferImageGranularity(),
                size,
                alignment,
                false, // upperAddress
                suballocType,
                false, // canMakeOtherLost
                strategy,
                &dstAllocRequest) &&
            MoveMakesSense(
                dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
            {
                VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);

                // Reached limit on number of allocations or bytes to move.
                if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
                    (m_BytesMoved + size > maxBytesToMove))
                {
                    return VK_SUCCESS;
                }

                VmaDefragmentationMove move = {};
                move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
                move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
                move.srcOffset = srcOffset;
                move.dstOffset = dstAllocRequest.offset;
                move.size = size;
                move.hAllocation = allocInfo.m_hAllocation;
                move.pSrcBlock = pSrcBlockInfo->m_pBlock;
                move.pDstBlock = pDstBlockInfo->m_pBlock;

                moves.push_back(move);

                pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
                    dstAllocRequest,
                    suballocType,
                    size,
                    allocInfo.m_hAllocation);

                if(freeOldAllocations)
                {
                    pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
                    allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
                }

                if(allocInfo.m_pChanged != VMA_NULL)
                {
                    *allocInfo.m_pChanged = VK_TRUE;
                }

                ++m_AllocationsMoved;
                m_BytesMoved += size;

                VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);

                break;
            }
        }

        // If the allocation was not moved, it stays in m_Allocations for the next round.

        if(srcAllocIndex > 0)
        {
            --srcAllocIndex;
        }
        else
        {
            if(srcBlockIndex > 0)
            {
                --srcBlockIndex;
                srcAllocIndex = SIZE_MAX;
            }
            else
            {
                return VK_SUCCESS;
            }
        }
    }
}
size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount() const
{
    size_t result = 0;
    for(size_t i = 0; i < m_Blocks.size(); ++i)
    {
        if(m_Blocks[i]->m_HasNonMovableAllocations)
        {
            ++result;
        }
    }
    return result;
}
VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    if(!m_AllAllocations && m_AllocationCount == 0)
    {
        return VK_SUCCESS;
    }

    const size_t blockCount = m_Blocks.size();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        BlockInfo* pBlockInfo = m_Blocks[blockIndex];

        if(m_AllAllocations)
        {
            VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
            for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
                {
                    AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
                    pBlockInfo->m_Allocations.push_back(allocInfo);
                }
            }
        }

        pBlockInfo->CalcHasNonMovableAllocations();

        // This is a choice based on research.
        pBlockInfo->SortAllocationsByOffsetDescending();
    }

    // Sort m_Blocks this time by the main criterium, from most "destination" to most "source" blocks.
    VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());

    // This is a choice based on research.
    const uint32_t roundCount = 2;

    // Execute defragmentation rounds (the main part).
    VkResult result = VK_SUCCESS;
    for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
    {
        result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove, !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL));
    }

    return result;
}
bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
    size_t dstBlockIndex, VkDeviceSize dstOffset,
    size_t srcBlockIndex, VkDeviceSize srcOffset)
{
    if(dstBlockIndex < srcBlockIndex)
    {
        return true;
    }
    if(dstBlockIndex > srcBlockIndex)
    {
        return false;
    }
    if(dstOffset < srcOffset)
    {
        return true;
    }
    return false;
}
VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
    VmaAllocator hAllocator,
    VmaBlockVector* pBlockVector,
    uint32_t currentFrameIndex,
    bool overlappingMoveSupported) :
    VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
    m_OverlappingMoveSupported(overlappingMoveSupported),
    m_AllocationCount(0),
    m_AllAllocations(false),
    m_BytesMoved(0),
    m_AllocationsMoved(0),
    m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
{
    VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
}

VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
{
}
VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
    VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
    VkDeviceSize maxBytesToMove,
    uint32_t maxAllocationsToMove,
    VmaDefragmentationFlags flags)
{
    VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);

    const size_t blockCount = m_pBlockVector->GetBlockCount();
    if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
    {
        return VK_SUCCESS;
    }

    PreprocessMetadata();

    // Sort blocks in order from most destination.

    m_BlockInfos.resize(blockCount);
    for(size_t i = 0; i < blockCount; ++i)
    {
        m_BlockInfos[i].origBlockIndex = i;
    }

    VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [this](const BlockInfo& lhs, const BlockInfo& rhs) -> bool {
        return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
            m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
    });

    // THE MAIN ALGORITHM

    FreeSpaceDatabase freeSpaceDb;

    size_t dstBlockInfoIndex = 0;
    size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
    VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
    VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
    VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
    VkDeviceSize dstOffset = 0;
    bool end = false;
    for(size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
    {
        const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
        VmaDeviceMemoryBlock* const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
        VmaBlockMetadata_Generic* const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
        for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
            !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
        {
            VmaAllocation_T* const pAlloc = srcSuballocIt->hAllocation;
            const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
            const VkDeviceSize srcAllocSize = srcSuballocIt->size;
            if(m_AllocationsMoved == maxAllocationsToMove ||
                m_BytesMoved + srcAllocSize > maxBytesToMove)
            {
                end = true;
                break;
            }
            const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;

            VmaDefragmentationMove move = {};
            // Try to place it in one of the free spaces from the database.
            size_t freeSpaceInfoIndex;
            VkDeviceSize dstAllocOffset;
            if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
                freeSpaceInfoIndex, dstAllocOffset))
            {
                size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
                VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
                VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;

                // Same block.
                if(freeSpaceInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeOffset(dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    InsertSuballoc(pFreeSpaceMetadata, suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = freeSpaceOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
            else
            {
                dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);

                // If the allocation doesn't fit before the end of dstBlock, forward to the next block.
                while(dstBlockInfoIndex < srcBlockInfoIndex &&
                    dstAllocOffset + srcAllocSize > dstBlockSize)
                {
                    // But before that, register remaining free space at the end of dst block.
                    freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);

                    ++dstBlockInfoIndex;
                    dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
                    pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
                    pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
                    dstBlockSize = pDstMetadata->GetSize();
                    dstOffset = 0;
                    dstAllocOffset = 0;
                }

                // Same block.
                if(dstBlockInfoIndex == srcBlockInfoIndex)
                {
                    VMA_ASSERT(dstAllocOffset <= srcAllocOffset);

                    const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;

                    bool skipOver = overlap;
                    if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
                    {
                        // If the destination and source places overlap, skip if it would move
                        // the allocation by only < 1/64 of its size.
                        skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
                    }

                    if(skipOver)
                    {
                        freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);

                        dstOffset = srcAllocOffset + srcAllocSize;
                        ++srcSuballocIt;
                    }
                    // MOVE OPTION 1: Move the allocation inside the same block by decreasing offset.
                    else
                    {
                        srcSuballocIt->offset = dstAllocOffset;
                        srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
                        dstOffset = dstAllocOffset + srcAllocSize;
                        m_BytesMoved += srcAllocSize;
                        ++m_AllocationsMoved;
                        ++srcSuballocIt;

                        move.srcBlockIndex = srcOrigBlockIndex;
                        move.dstBlockIndex = dstOrigBlockIndex;
                        move.srcOffset = srcAllocOffset;
                        move.dstOffset = dstAllocOffset;
                        move.size = srcAllocSize;

                        moves.push_back(move);
                    }
                }
                // Different block.
                else
                {
                    // MOVE OPTION 2: Move the allocation to a different block.
                    VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
                    VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);

                    VmaSuballocation suballoc = *srcSuballocIt;
                    suballoc.offset = dstAllocOffset;
                    suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
                    dstOffset = dstAllocOffset + srcAllocSize;
                    m_BytesMoved += srcAllocSize;
                    ++m_AllocationsMoved;

                    VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
                    ++nextSuballocIt;
                    pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
                    srcSuballocIt = nextSuballocIt;

                    pDstMetadata->m_Suballocations.push_back(suballoc);

                    move.srcBlockIndex = srcOrigBlockIndex;
                    move.dstBlockIndex = dstOrigBlockIndex;
                    move.srcOffset = srcAllocOffset;
                    move.dstOffset = dstAllocOffset;
                    move.size = srcAllocSize;

                    moves.push_back(move);
                }
            }
        }
    }

    m_BlockInfos.clear();

    PostprocessMetadata();

    return VK_SUCCESS;
}
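// The fast algorithm is a single compacting sweep: blocks are sorted so the
// emptiest ones come first as destinations, then every suballocation is slid
// down to the current write cursor (dstOffset), either within its own block
// (ChangeOffset) or into an earlier block (ChangeBlockAllocation). Gaps that
// cannot be filled immediately are remembered in FreeSpaceDatabase and reused
// for later allocations that happen to fit.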
void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        pMetadata->m_FreeCount = 0;
        pMetadata->m_SumFreeSize = pMetadata->GetSize();
        pMetadata->m_FreeSuballocationsBySize.clear();
        for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
            it != pMetadata->m_Suballocations.end(); )
        {
            if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
            {
                VmaSuballocationList::iterator nextIt = it;
                ++nextIt;
                pMetadata->m_Suballocations.erase(it);
                it = nextIt;
            }
            else
            {
                ++it;
            }
        }
    }
}
void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
{
    const size_t blockCount = m_pBlockVector->GetBlockCount();
    for(size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
    {
        VmaBlockMetadata_Generic* const pMetadata =
            (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
        const VkDeviceSize blockSize = pMetadata->GetSize();

        // No allocations in this block - entire area is free.
        if(pMetadata->m_Suballocations.empty())
        {
            pMetadata->m_FreeCount = 1;
            VmaSuballocation suballoc = {
                0, // offset
                blockSize, // size
                VMA_NULL, // hAllocation
                VMA_SUBALLOCATION_TYPE_FREE };
            pMetadata->m_Suballocations.push_back(suballoc);
            pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
        }
        // There are some allocations in this block.
        else
        {
            VkDeviceSize offset = 0;
            VmaSuballocationList::iterator it;
            for(it = pMetadata->m_Suballocations.begin();
                it != pMetadata->m_Suballocations.end();
                ++it)
            {
                VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
                VMA_ASSERT(it->offset >= offset);

                // Need to insert preceding free space.
                if(it->offset > offset)
                {
                    ++pMetadata->m_FreeCount;
                    const VkDeviceSize freeSize = it->offset - offset;
                    VmaSuballocation suballoc = {
                        offset, // offset
                        freeSize, // size
                        VMA_NULL, // hAllocation
                        VMA_SUBALLOCATION_TYPE_FREE };
                    VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                    if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                    {
                        pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
                    }
                }

                pMetadata->m_SumFreeSize -= it->size;
                offset = it->offset + it->size;
            }

            // Need to insert trailing free space.
            if(offset < blockSize)
            {
                ++pMetadata->m_FreeCount;
                const VkDeviceSize freeSize = blockSize - offset;
                VmaSuballocation suballoc = {
                    offset, // offset
                    freeSize, // size
                    VMA_NULL, // hAllocation
                    VMA_SUBALLOCATION_TYPE_FREE };
                VMA_ASSERT(it == pMetadata->m_Suballocations.end());
                VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
                if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
                {
                    pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
                }
            }

            VMA_SORT(
                pMetadata->m_FreeSuballocationsBySize.begin(),
                pMetadata->m_FreeSuballocationsBySize.end(),
                VmaSuballocationItemSizeLess());
        }

        VMA_HEAVY_ASSERT(pMetadata->Validate());
    }
}
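// Pre/PostprocessMetadata exist because the fast algorithm edits the raw
// suballocation lists directly: Preprocess strips all FREE entries so only
// real allocations remain, and Postprocess re-derives the free ranges, free
// counts, and the by-size lookup vector from the compacted layout before
// normal allocation resumes.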
void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata, const VmaSuballocation& suballoc)
{
    // Find the first suballocation with offset >= suballoc.offset and insert before it.
    VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
    while(it != pMetadata->m_Suballocations.end() &&
        it->offset < suballoc.offset)
    {
        ++it;
    }
    pMetadata->m_Suballocations.insert(it, suballoc);
}
VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
    VmaAllocator hAllocator,
    VmaPool hCustomPool,
    VmaBlockVector* pBlockVector,
    uint32_t currFrameIndex) :
    res(VK_SUCCESS),
    mutexLocked(false),
    blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
    defragmentationMoves(VmaStlAllocator<VmaDefragmentationMove>(hAllocator->GetAllocationCallbacks())),
    defragmentationMovesProcessed(0),
    defragmentationMovesCommitted(0),
    hasDefragmentationPlan(0),
    m_hAllocator(hAllocator),
    m_hCustomPool(hCustomPool),
    m_pBlockVector(pBlockVector),
    m_CurrFrameIndex(currFrameIndex),
    m_pAlgorithm(VMA_NULL),
    m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
    m_AllAllocations(false)
{
}
VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
{
    vma_delete(m_hAllocator, m_pAlgorithm);
}

void VmaBlockVectorDefragmentationContext::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
{
    AllocInfo info = { hAlloc, pChanged };
    m_Allocations.push_back(info);
}
void VmaBlockVectorDefragmentationContext::Begin(bool overlappingMoveSupported, VmaDefragmentationFlags flags)
{
    const bool allAllocations = m_AllAllocations ||
        m_Allocations.size() == m_pBlockVector->CalcAllocationCount();

    /********************************
    HERE IS THE CHOICE OF DEFRAGMENTATION ALGORITHM.
    ********************************/

    /*
    Fast algorithm is supported only when certain criteria are met:
    - VMA_DEBUG_MARGIN is 0.
    - All allocations in this block vector are movable.
    - There is no possibility of image/buffer granularity conflict.
    - The defragmentation is not incremental.
    */
    if(VMA_DEBUG_MARGIN == 0 &&
        allAllocations &&
        !m_pBlockVector->IsBufferImageGranularityConflictPossible() &&
        !(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL))
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }
    else
    {
        m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
            m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
    }

    if(allAllocations)
    {
        m_pAlgorithm->AddAll();
    }
    else
    {
        for(size_t i = 0, count = m_Allocations.size(); i < count; ++i)
        {
            m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
        }
    }
}
VmaDefragmentationContext_T::VmaDefragmentationContext_T(
    VmaAllocator hAllocator,
    uint32_t currFrameIndex,
    uint32_t flags,
    VmaDefragmentationStats* pStats) :
    m_hAllocator(hAllocator),
    m_CurrFrameIndex(currFrameIndex),
    m_Flags(flags),
    m_pStats(pStats),
    m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
{
    memset(m_DefaultPoolContexts, 0, sizeof(m_DefaultPoolContexts));
}
VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
{
    for(size_t i = m_CustomPoolContexts.size(); i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
        pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
        vma_delete(m_hAllocator, pBlockVectorCtx);
    }
    for(size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
        if(pBlockVectorCtx)
        {
            pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_Flags, m_pStats);
            vma_delete(m_hAllocator, pBlockVectorCtx);
        }
    }
}
void VmaDefragmentationContext_T::AddPools(uint32_t poolCount, VmaPool* pPools)
{
    for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
    {
        VmaPool pool = pPools[poolIndex];
        VMA_ASSERT(pool);
        // Pools with an algorithm other than the default are not defragmented.
        if(pool->m_BlockVector.GetAlgorithm() == 0)
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            for(size_t i = m_CustomPoolContexts.size(); i--; )
            {
                if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
                {
                    pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                    break;
                }
            }

            if(!pBlockVectorDefragCtx)
            {
                pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                    m_hAllocator,
                    pool,
                    &pool->m_BlockVector,
                    m_CurrFrameIndex);
                m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
            }

            pBlockVectorDefragCtx->AddAll();
        }
    }
}
void VmaDefragmentationContext_T::AddAllocations(
    uint32_t allocationCount,
    VmaAllocation* pAllocations,
    VkBool32* pAllocationsChanged)
{
    // Dispatch pAllocations among defragmentation contexts. Create them when necessary.
    for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        const VmaAllocation hAlloc = pAllocations[allocIndex];
        VMA_ASSERT(hAlloc);
        // DedicatedAlloc cannot be defragmented.
        if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
            // Lost allocation cannot be defragmented.
            (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
        {
            VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;

            const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
            // This allocation belongs to a custom pool.
            if(hAllocPool != VK_NULL_HANDLE)
            {
                // Pools with an algorithm other than the default are not defragmented.
                if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
                {
                    for(size_t i = m_CustomPoolContexts.size(); i--; )
                    {
                        if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
                        {
                            pBlockVectorDefragCtx = m_CustomPoolContexts[i];
                            break;
                        }
                    }
                    if(!pBlockVectorDefragCtx)
                    {
                        pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                            m_hAllocator,
                            hAllocPool,
                            &hAllocPool->m_BlockVector,
                            m_CurrFrameIndex);
                        m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
                    }
                }
            }
            // This allocation belongs to a default pool.
            else
            {
                const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
                pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
                if(!pBlockVectorDefragCtx)
                {
                    pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
                        m_hAllocator,
                        VMA_NULL, // hCustomPool
                        m_hAllocator->m_pBlockVectors[memTypeIndex],
                        m_CurrFrameIndex);
                    m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
                }
            }

            if(pBlockVectorDefragCtx)
            {
                VkBool32* const pChanged = (pAllocationsChanged != VMA_NULL) ?
                    &pAllocationsChanged[allocIndex] : VMA_NULL;
                pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
            }
        }
    }
}
VkResult VmaDefragmentationContext_T::Defragment(
    VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
    VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
    VkCommandBuffer commandBuffer, VmaDefragmentationStats* pStats, VmaDefragmentationFlags flags)
{
    if(pStats)
    {
        memset(pStats, 0, sizeof(VmaDefragmentationStats));
    }

    if(flags & VMA_DEFRAGMENTATION_FLAG_INCREMENTAL)
    {
        // For incremental defragmentation we only earmark how much we are allowed to move;
        // the real work happens in the defragmentation passes.
        m_MaxCpuBytesToMove = maxCpuBytesToMove;
        m_MaxCpuAllocationsToMove = maxCpuAllocationsToMove;

        m_MaxGpuBytesToMove = maxGpuBytesToMove;
        m_MaxGpuAllocationsToMove = maxGpuAllocationsToMove;

        if(m_MaxCpuBytesToMove == 0 && m_MaxCpuAllocationsToMove == 0 &&
            m_MaxGpuBytesToMove == 0 && m_MaxGpuAllocationsToMove == 0)
        {
            return VK_SUCCESS;
        }

        return VK_NOT_READY;
    }

    if(commandBuffer == VK_NULL_HANDLE)
    {
        maxGpuBytesToMove = 0;
        maxGpuAllocationsToMove = 0;
    }

    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                pStats, flags,
                maxCpuBytesToMove, maxCpuAllocationsToMove,
                maxGpuBytesToMove, maxGpuAllocationsToMove,
                commandBuffer);
            if(pBlockVectorCtx->res != VK_SUCCESS)
            {
                res = pBlockVectorCtx->res;
            }
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount && res >= VK_SUCCESS;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
        pBlockVectorCtx->GetBlockVector()->Defragment(
            pBlockVectorCtx,
            pStats, flags,
            maxCpuBytesToMove, maxCpuAllocationsToMove,
            maxGpuBytesToMove, maxGpuAllocationsToMove,
            commandBuffer);
        if(pBlockVectorCtx->res != VK_SUCCESS)
        {
            res = pBlockVectorCtx->res;
        }
    }

    return res;
}
VkResult VmaDefragmentationContext_T::DefragmentPassBegin(VmaDefragmentationPassInfo* pInfo)
{
    VmaDefragmentationPassMoveInfo* pCurrentMove = pInfo->pMoves;
    uint32_t movesLeft = pInfo->moveCount;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                pBlockVectorCtx->GetBlockVector()->Defragment(
                    pBlockVectorCtx,
                    m_pStats, m_Flags,
                    m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                    m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                    VK_NULL_HANDLE);

                if(pBlockVectorCtx->res < VK_SUCCESS)
                    continue;

                pBlockVectorCtx->hasDefragmentationPlan = true;
            }

            const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
                pBlockVectorCtx,
                pCurrentMove, movesLeft);

            movesLeft -= processed;
            pCurrentMove += processed;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            pBlockVectorCtx->GetBlockVector()->Defragment(
                pBlockVectorCtx,
                m_pStats, m_Flags,
                m_MaxCpuBytesToMove, m_MaxCpuAllocationsToMove,
                m_MaxGpuBytesToMove, m_MaxGpuAllocationsToMove,
                VK_NULL_HANDLE);

            if(pBlockVectorCtx->res < VK_SUCCESS)
                continue;

            pBlockVectorCtx->hasDefragmentationPlan = true;
        }

        const uint32_t processed = pBlockVectorCtx->GetBlockVector()->ProcessDefragmentations(
            pBlockVectorCtx,
            pCurrentMove, movesLeft);

        movesLeft -= processed;
        pCurrentMove += processed;
    }

    pInfo->moveCount = pInfo->moveCount - movesLeft;

    return VK_SUCCESS;
}
VkResult VmaDefragmentationContext_T::DefragmentPassEnd()
{
    VkResult res = VK_SUCCESS;

    // Process default pools.
    for(uint32_t memTypeIndex = 0;
        memTypeIndex < m_hAllocator->GetMemoryTypeCount();
        ++memTypeIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
        if(pBlockVectorCtx)
        {
            VMA_ASSERT(pBlockVectorCtx->GetBlockVector());

            if(!pBlockVectorCtx->hasDefragmentationPlan)
            {
                res = VK_NOT_READY;
                continue;
            }

            pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
                pBlockVectorCtx, m_pStats);

            if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
                res = VK_NOT_READY;
        }
    }

    // Process custom pools.
    for(size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
        customCtxIndex < customCtxCount;
        ++customCtxIndex)
    {
        VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
        VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());

        if(!pBlockVectorCtx->hasDefragmentationPlan)
        {
            res = VK_NOT_READY;
            continue;
        }

        pBlockVectorCtx->GetBlockVector()->CommitDefragmentations(
            pBlockVectorCtx, m_pStats);

        if(pBlockVectorCtx->defragmentationMoves.size() != pBlockVectorCtx->defragmentationMovesCommitted)
            res = VK_NOT_READY;
    }

    return res;
}
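/*
Editor's usage sketch (assumption-labeled, not library code): the incremental
path above is driven from the application through vmaBeginDefragmentationPass()
and vmaEndDefragmentationPass(), looping until all planned moves are committed.
The `moves` scratch array and `defragCtx` handle are hypothetical:

    VmaDefragmentationPassMoveInfo moves[64];
    VmaDefragmentationPassInfo passInfo = {};
    for(;;)
    {
        passInfo.moveCount = 64;
        passInfo.pMoves = moves;
        if(vmaBeginDefragmentationPass(allocator, defragCtx, &passInfo) == VK_SUCCESS)
            break; // plan fulfilled, every move committed
        // ... copy the passInfo.moveCount regions described in `moves` ...
        if(vmaEndDefragmentationPass(allocator, defragCtx) == VK_SUCCESS)
            break;
    }
*/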
#if VMA_RECORDING_ENABLED

VmaRecorder::VmaRecorder() :
    m_UseMutex(true),
    m_Flags(0),
    m_File(VMA_NULL),
    m_Freq(INT64_MAX),
    m_StartCounter(INT64_MAX)
{
}

VkResult VmaRecorder::Init(const VmaRecordSettings& settings, bool useMutex)
{
    m_UseMutex = useMutex;
    m_Flags = settings.flags;

    QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
    QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);

    // Open file for writing.
    errno_t err = fopen_s(&m_File, settings.pFilePath, "wb");
    if(err != 0)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }

    // Write header: file type magic, then format version.
    fprintf(m_File, "%s\n", "Vulkan Memory Allocator,Calls recording");
    fprintf(m_File, "%s\n", "1,8");

    return VK_SUCCESS;
}
VmaRecorder::~VmaRecorder()
{
    if(m_File != VMA_NULL)
    {
        fclose(m_File);
    }
}
void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}

void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
    Flush();
}
void VmaRecorder::RecordCreatePool(uint32_t frameIndex,
    const VmaPoolCreateInfo& createInfo,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
        createInfo.memoryTypeIndex,
        createInfo.flags,
        createInfo.blockSize,
        (uint64_t)createInfo.minBlockCount,
        (uint64_t)createInfo.maxBlockCount,
        createInfo.frameInUseCount,
        pool);
    Flush();
}

void VmaRecorder::RecordDestroyPool(uint32_t frameIndex, VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    const VmaAllocationCreateInfo& createInfo,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, ",%s\n", userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    const VmaAllocationCreateInfo& createInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(createInfo.flags, createInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        vkMemReq.size,
        vkMemReq.alignment,
        vkMemReq.memoryTypeBits,
        requiresDedicatedAllocation ? 1 : 0,
        prefersDedicatedAllocation ? 1 : 0,
        createInfo.flags,
        createInfo.usage,
        createInfo.requiredFlags,
        createInfo.preferredFlags,
        createInfo.memoryTypeBits,
        createInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
    uint64_t allocationCount,
    const VmaAllocation* pAllocations)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
    PrintPointerList(allocationCount, pAllocations);
    fprintf(m_File, "\n");
    Flush();
}
void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
    VmaAllocation allocation,
    const void* pUserData)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(
        allocation->IsUserDataString() ? VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT : 0,
        pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}
void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}

void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
    VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
        allocation,
        offset,
        size);
    Flush();
}
void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
    const VkBufferCreateInfo& bufCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        bufCreateInfo.flags,
        bufCreateInfo.size,
        bufCreateInfo.usage,
        bufCreateInfo.sharingMode,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}

void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
    const VkImageCreateInfo& imageCreateInfo,
    const VmaAllocationCreateInfo& allocCreateInfo,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    UserDataString userDataStr(allocCreateInfo.flags, allocCreateInfo.pUserData);
    fprintf(m_File, "%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        imageCreateInfo.flags,
        imageCreateInfo.imageType,
        imageCreateInfo.format,
        imageCreateInfo.extent.width,
        imageCreateInfo.extent.height,
        imageCreateInfo.extent.depth,
        imageCreateInfo.mipLevels,
        imageCreateInfo.arrayLayers,
        imageCreateInfo.samples,
        imageCreateInfo.tiling,
        imageCreateInfo.usage,
        imageCreateInfo.sharingMode,
        imageCreateInfo.initialLayout,
        allocCreateInfo.flags,
        allocCreateInfo.usage,
        allocCreateInfo.requiredFlags,
        allocCreateInfo.preferredFlags,
        allocCreateInfo.memoryTypeBits,
        allocCreateInfo.pool,
        allocation,
        userDataStr.GetString());
    Flush();
}
void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
    VmaAllocation allocation)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
        allocation);
    Flush();
}

void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
    VmaPool pool)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
        pool);
    Flush();
}
void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
        info.flags);
    PrintPointerList(info.allocationCount, info.pAllocations);
    fprintf(m_File, ",");
    PrintPointerList(info.poolCount, info.pPools);
    fprintf(m_File, ",%llu,%u,%llu,%u,%p,%p\n",
        info.maxCpuBytesToMove,
        info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove,
        info.maxGpuAllocationsToMove,
        info.commandBuffer,
        ctx);
    Flush();
}

void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
    VmaDefragmentationContext ctx)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
        ctx);
    Flush();
}

void VmaRecorder::RecordSetPoolName(uint32_t frameIndex,
    VmaPool pool,
    const char* name)
{
    CallParams callParams;
    GetBasicParams(callParams);

    VmaMutexLock lock(m_FileMutex, m_UseMutex);
    fprintf(m_File, "%u,%.3f,%u,vmaSetPoolName,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
        pool, name != VMA_NULL ? name : "");
    Flush();
}
VmaRecorder::UserDataString::UserDataString(VmaAllocationCreateFlags allocFlags, const void* pUserData)
{
    if(pUserData != VMA_NULL)
    {
        if((allocFlags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0)
        {
            m_Str = (const char*)pUserData;
        }
        else
        {
            // Not a string: record the pointer value itself.
            sprintf_s(m_PtrStr, "%p", pUserData);
            m_Str = m_PtrStr;
        }
    }
    else
    {
        m_Str = "";
    }
}
void VmaRecorder::WriteConfiguration(
    const VkPhysicalDeviceProperties& devProps,
    const VkPhysicalDeviceMemoryProperties& memProps,
    uint32_t vulkanApiVersion,
    bool dedicatedAllocationExtensionEnabled,
    bool bindMemory2ExtensionEnabled,
    bool memoryBudgetExtensionEnabled,
    bool deviceCoherentMemoryExtensionEnabled)
{
    fprintf(m_File, "Config,Begin\n");

    fprintf(m_File, "VulkanApiVersion,%u,%u\n", VK_VERSION_MAJOR(vulkanApiVersion), VK_VERSION_MINOR(vulkanApiVersion));

    fprintf(m_File, "PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
    fprintf(m_File, "PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
    fprintf(m_File, "PhysicalDevice,vendorID,%u\n", devProps.vendorID);
    fprintf(m_File, "PhysicalDevice,deviceID,%u\n", devProps.deviceID);
    fprintf(m_File, "PhysicalDevice,deviceType,%u\n", devProps.deviceType);
    fprintf(m_File, "PhysicalDevice,deviceName,%s\n", devProps.deviceName);

    fprintf(m_File, "PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
    fprintf(m_File, "PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
    fprintf(m_File, "PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);

    fprintf(m_File, "PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
    for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
        fprintf(m_File, "PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
    }
    fprintf(m_File, "PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
    for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
    {
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
        fprintf(m_File, "PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
    }

    fprintf(m_File, "Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_EXT_memory_budget,%u\n", memoryBudgetExtensionEnabled ? 1 : 0);
    fprintf(m_File, "Extension,VK_AMD_device_coherent_memory,%u\n", deviceCoherentMemoryExtensionEnabled ? 1 : 0);

    fprintf(m_File, "Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
    fprintf(m_File, "Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
    fprintf(m_File, "Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
    fprintf(m_File, "Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
    fprintf(m_File, "Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
    fprintf(m_File, "Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    fprintf(m_File, "Config,End\n");
}
void VmaRecorder::GetBasicParams(CallParams& outParams)
{
    outParams.threadId = GetCurrentThreadId();

    LARGE_INTEGER counter;
    QueryPerformanceCounter(&counter);
    outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
}

void VmaRecorder::PrintPointerList(uint64_t count, const VmaAllocation* pItems)
{
    if(count)
    {
        fprintf(m_File, "%p", pItems[0]);
        for(uint64_t i = 1; i < count; ++i)
        {
            fprintf(m_File, " %p", pItems[i]);
        }
    }
}

void VmaRecorder::Flush()
{
    if((m_Flags & VMA_RECORD_FLUSH_AFTER_CALL_BIT) != 0)
    {
        fflush(m_File);
    }
}

#endif // #if VMA_RECORDING_ENABLED
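/*
Editor's usage sketch: recording is enabled through
VmaAllocatorCreateInfo::pRecordSettings at allocator creation time
(Windows-only; VMA_RECORDING_ENABLED must be defined to 1). The file path is
illustrative:

    VmaRecordSettings recordSettings = {};
    recordSettings.flags = VMA_RECORD_FLUSH_AFTER_CALL_BIT; // fflush after every line
    recordSettings.pFilePath = "vma_capture.csv";           // hypothetical path

    VmaAllocatorCreateInfo allocatorInfo = {};
    // ... physicalDevice, device, instance ...
    allocatorInfo.pRecordSettings = &recordSettings;
*/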
VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(const VkAllocationCallbacks* pAllocationCallbacks) :
    m_Allocator(pAllocationCallbacks, 1024)
{
}

template<typename... Types> VmaAllocation VmaAllocationObjectAllocator::Allocate(Types... args)
{
    VmaMutexLock mutexLock(m_Mutex);
    return m_Allocator.Alloc<Types...>(std::forward<Types>(args)...);
}

void VmaAllocationObjectAllocator::Free(VmaAllocation hAlloc)
{
    VmaMutexLock mutexLock(m_Mutex);
    m_Allocator.Free(hAlloc);
}
VmaAllocator_T::VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :
    m_UseMutex((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT) == 0),
    m_VulkanApiVersion(pCreateInfo->vulkanApiVersion != 0 ? pCreateInfo->vulkanApiVersion : VK_API_VERSION_1_0),
    m_UseKhrDedicatedAllocation((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0),
    m_UseKhrBindMemory2((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0),
    m_UseExtMemoryBudget((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0),
    m_UseAmdDeviceCoherentMemory((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_AMD_DEVICE_COHERENT_MEMORY_BIT) != 0),
    m_UseKhrBufferDeviceAddress((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT) != 0),
    m_hDevice(pCreateInfo->device),
    m_hInstance(pCreateInfo->instance),
    m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
    m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
        *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
    m_AllocationObjectAllocator(&m_AllocationCallbacks),
    m_HeapSizeLimitMask(0),
    m_PreferredLargeHeapBlockSize(0),
    m_PhysicalDevice(pCreateInfo->physicalDevice),
    m_CurrentFrameIndex(0),
    m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
    m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks())),
    m_NextPoolId(0),
    m_GlobalMemoryTypeBits(UINT32_MAX)
#if VMA_RECORDING_ENABLED
    ,m_pRecorder(VMA_NULL)
#endif
{
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        // On Vulkan 1.1+ the core entry points are used; the KHR extensions are superfluous.
        m_UseKhrDedicatedAllocation = false;
        m_UseKhrBindMemory2 = false;
    }

    if(VMA_DEBUG_DETECT_CORRUPTION)
    {
        // Margin must be a multiple of sizeof(uint32_t) because the corruption-detection
        // magic value is written as uint32_t.
        VMA_ASSERT(VMA_DEBUG_MARGIN % sizeof(uint32_t) == 0);
    }

    VMA_ASSERT(pCreateInfo->physicalDevice && pCreateInfo->device && pCreateInfo->instance);

    if(m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
#if !(VMA_DEDICATED_ALLOCATION)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
        }
#endif
#if !(VMA_BIND_MEMORY2)
        if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
        }
#endif
    }
#if !(VMA_MEMORY_BUDGET)
    if((pCreateInfo->flags & VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT) != 0)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT set but required extension is disabled by preprocessor macros.");
    }
#endif
#if !(VMA_BUFFER_DEVICE_ADDRESS)
    if(m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT is set but required extension or Vulkan 1.2 is not available in your Vulkan header or its support in VMA has been disabled by a preprocessor macro.");
    }
#endif
#if VMA_VULKAN_VERSION < 1002000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 2, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_2 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif
#if VMA_VULKAN_VERSION < 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(0 && "vulkanApiVersion >= VK_API_VERSION_1_1 but required Vulkan version is disabled by preprocessor macros.");
    }
#endif

    memset(&m_DeviceMemoryCallbacks, 0, sizeof(m_DeviceMemoryCallbacks));
    memset(&m_PhysicalDeviceProperties, 0, sizeof(m_PhysicalDeviceProperties));
    memset(&m_MemProps, 0, sizeof(m_MemProps));

    memset(&m_pBlockVectors, 0, sizeof(m_pBlockVectors));
    memset(&m_pDedicatedAllocations, 0, sizeof(m_pDedicatedAllocations));
    memset(&m_VulkanFunctions, 0, sizeof(m_VulkanFunctions));

    if(pCreateInfo->pDeviceMemoryCallbacks != VMA_NULL)
    {
        m_DeviceMemoryCallbacks.pUserData = pCreateInfo->pDeviceMemoryCallbacks->pUserData;
        m_DeviceMemoryCallbacks.pfnAllocate = pCreateInfo->pDeviceMemoryCallbacks->pfnAllocate;
        m_DeviceMemoryCallbacks.pfnFree = pCreateInfo->pDeviceMemoryCallbacks->pfnFree;
    }

    ImportVulkanFunctions(pCreateInfo->pVulkanFunctions);

    (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
    (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);

    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
    VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
    VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));

    m_PreferredLargeHeapBlockSize = (pCreateInfo->preferredLargeHeapBlockSize != 0) ?
        pCreateInfo->preferredLargeHeapBlockSize : static_cast<VkDeviceSize>(VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);

    m_GlobalMemoryTypeBits = CalculateGlobalMemoryTypeBits();

    if(pCreateInfo->pHeapSizeLimit != VMA_NULL)
    {
        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            const VkDeviceSize limit = pCreateInfo->pHeapSizeLimit[heapIndex];
            if(limit != VK_WHOLE_SIZE)
            {
                m_HeapSizeLimitMask |= 1u << heapIndex;
                if(limit < m_MemProps.memoryHeaps[heapIndex].size)
                {
                    m_MemProps.memoryHeaps[heapIndex].size = limit;
                }
            }
        }
    }

    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);

        m_pBlockVectors[memTypeIndex] = vma_new(this, VmaBlockVector)(
            this,
            VK_NULL_HANDLE, // hParentPool
            memTypeIndex,
            preferredBlockSize,
            0,
            SIZE_MAX,
            GetBufferImageGranularity(),
            pCreateInfo->frameInUseCount,
            false, // explicitBlockSize
            false); // linearAlgorithm
        // No need to call CreateMinBlocks here, because minBlockCount is 0.
        m_pDedicatedAllocations[memTypeIndex] = vma_new(this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
    }
}

VkResult VmaAllocator_T::Init(const VmaAllocatorCreateInfo* pCreateInfo)
{
    VkResult res = VK_SUCCESS;

    if(pCreateInfo->pRecordSettings != VMA_NULL &&
        !VmaStrIsEmpty(pCreateInfo->pRecordSettings->pFilePath))
    {
#if VMA_RECORDING_ENABLED
        m_pRecorder = vma_new(this, VmaRecorder)();
        res = m_pRecorder->Init(*pCreateInfo->pRecordSettings, m_UseMutex);
        if(res != VK_SUCCESS)
        {
            return res;
        }
        m_pRecorder->WriteConfiguration(
            m_PhysicalDeviceProperties,
            m_MemProps,
            m_VulkanApiVersion,
            m_UseKhrDedicatedAllocation,
            m_UseKhrBindMemory2,
            m_UseExtMemoryBudget,
            m_UseAmdDeviceCoherentMemory);
        m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
#else
        VMA_ASSERT(0 && "VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
        return VK_ERROR_FEATURE_NOT_PRESENT;
#endif
    }

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET

    return res;
}
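/*
Editor's usage sketch for the pHeapSizeLimit handling in the constructor above
(the 256 MiB figure is illustrative):

    VkDeviceSize heapLimits[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapLimits[i] = VK_WHOLE_SIZE;      // VK_WHOLE_SIZE means "no limit"
    heapLimits[0] = 256ull * 1024 * 1024;   // cap heap 0 at 256 MiB

    VmaAllocatorCreateInfo allocatorInfo = {};
    // ... physicalDevice, device, instance ...
    allocatorInfo.pHeapSizeLimit = heapLimits;
*/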
VmaAllocator_T::~VmaAllocator_T()
{
#if VMA_RECORDING_ENABLED
    if(m_pRecorder != VMA_NULL)
    {
        m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
        vma_delete(this, m_pRecorder);
    }
#endif

    VMA_ASSERT(m_Pools.empty());

    for(size_t i = GetMemoryTypeCount(); i--; )
    {
        if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
        {
            VMA_ASSERT(0 && "Unfreed dedicated allocations found.");
        }

        vma_delete(this, m_pDedicatedAllocations[i]);
        vma_delete(this, m_pBlockVectors[i]);
    }
}
void VmaAllocator_T::ImportVulkanFunctions(const VmaVulkanFunctions* pVulkanFunctions)
{
#if VMA_STATIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Static();
#endif

    if(pVulkanFunctions != VMA_NULL)
    {
        ImportVulkanFunctions_Custom(pVulkanFunctions);
    }

#if VMA_DYNAMIC_VULKAN_FUNCTIONS == 1
    ImportVulkanFunctions_Dynamic();
#endif

    ValidateVulkanFunctions();
}
#if VMA_STATIC_VULKAN_FUNCTIONS == 1

void VmaAllocator_T::ImportVulkanFunctions_Static()
{
    // Vulkan 1.0
    m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
    m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
    m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
    m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
    m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
    m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
    m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
    m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
    m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
    m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
    m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
    m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
    m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
    m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
    m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
    m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
    m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;

    // Vulkan 1.1
#if VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR = (PFN_vkGetBufferMemoryRequirements2)vkGetBufferMemoryRequirements2;
        m_VulkanFunctions.vkGetImageMemoryRequirements2KHR = (PFN_vkGetImageMemoryRequirements2)vkGetImageMemoryRequirements2;
        m_VulkanFunctions.vkBindBufferMemory2KHR = (PFN_vkBindBufferMemory2)vkBindBufferMemory2;
        m_VulkanFunctions.vkBindImageMemory2KHR = (PFN_vkBindImageMemory2)vkBindImageMemory2;
        m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR = (PFN_vkGetPhysicalDeviceMemoryProperties2)vkGetPhysicalDeviceMemoryProperties2;
    }
#endif
}

#endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1
void VmaAllocator_T::ImportVulkanFunctions_Custom(const VmaVulkanFunctions* pVulkanFunctions)
{
    VMA_ASSERT(pVulkanFunctions != VMA_NULL);

#define VMA_COPY_IF_NOT_NULL(funcName) \
    if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName;

    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
    VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
    VMA_COPY_IF_NOT_NULL(vkFreeMemory);
    VMA_COPY_IF_NOT_NULL(vkMapMemory);
    VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
    VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
    VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
    VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
    VMA_COPY_IF_NOT_NULL(vkCreateImage);
    VMA_COPY_IF_NOT_NULL(vkDestroyImage);
    VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
    VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
    VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
#endif

#if VMA_MEMORY_BUDGET
    VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties2KHR);
#endif

#undef VMA_COPY_IF_NOT_NULL
}
void VmaAllocator_T::ImportVulkanFunctions_Dynamic()
{
#define VMA_FETCH_INSTANCE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetInstanceProcAddr(m_hInstance, functionNameString);
#define VMA_FETCH_DEVICE_FUNC(memberName, functionPointerType, functionNameString) \
    if(m_VulkanFunctions.memberName == VMA_NULL) \
        m_VulkanFunctions.memberName = \
            (functionPointerType)vkGetDeviceProcAddr(m_hDevice, functionNameString);

    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceProperties, PFN_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
    VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties, PFN_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
    VMA_FETCH_DEVICE_FUNC(vkAllocateMemory, PFN_vkAllocateMemory, "vkAllocateMemory");
    VMA_FETCH_DEVICE_FUNC(vkFreeMemory, PFN_vkFreeMemory, "vkFreeMemory");
    VMA_FETCH_DEVICE_FUNC(vkMapMemory, PFN_vkMapMemory, "vkMapMemory");
    VMA_FETCH_DEVICE_FUNC(vkUnmapMemory, PFN_vkUnmapMemory, "vkUnmapMemory");
    VMA_FETCH_DEVICE_FUNC(vkFlushMappedMemoryRanges, PFN_vkFlushMappedMemoryRanges, "vkFlushMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkInvalidateMappedMemoryRanges, PFN_vkInvalidateMappedMemoryRanges, "vkInvalidateMappedMemoryRanges");
    VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory, PFN_vkBindBufferMemory, "vkBindBufferMemory");
    VMA_FETCH_DEVICE_FUNC(vkBindImageMemory, PFN_vkBindImageMemory, "vkBindImageMemory");
    VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements, PFN_vkGetBufferMemoryRequirements, "vkGetBufferMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements, PFN_vkGetImageMemoryRequirements, "vkGetImageMemoryRequirements");
    VMA_FETCH_DEVICE_FUNC(vkCreateBuffer, PFN_vkCreateBuffer, "vkCreateBuffer");
    VMA_FETCH_DEVICE_FUNC(vkDestroyBuffer, PFN_vkDestroyBuffer, "vkDestroyBuffer");
    VMA_FETCH_DEVICE_FUNC(vkCreateImage, PFN_vkCreateImage, "vkCreateImage");
    VMA_FETCH_DEVICE_FUNC(vkDestroyImage, PFN_vkDestroyImage, "vkDestroyImage");
    VMA_FETCH_DEVICE_FUNC(vkCmdCopyBuffer, PFN_vkCmdCopyBuffer, "vkCmdCopyBuffer");

#if VMA_DEDICATED_ALLOCATION
    if(m_UseKhrDedicatedAllocation)
    {
        VMA_FETCH_DEVICE_FUNC(vkGetBufferMemoryRequirements2KHR, PFN_vkGetBufferMemoryRequirements2KHR, "vkGetBufferMemoryRequirements2KHR");
        VMA_FETCH_DEVICE_FUNC(vkGetImageMemoryRequirements2KHR, PFN_vkGetImageMemoryRequirements2KHR, "vkGetImageMemoryRequirements2KHR");
    }
#endif

#if VMA_BIND_MEMORY2
    if(m_UseKhrBindMemory2)
    {
        VMA_FETCH_DEVICE_FUNC(vkBindBufferMemory2KHR, PFN_vkBindBufferMemory2KHR, "vkBindBufferMemory2KHR");
        VMA_FETCH_DEVICE_FUNC(vkBindImageMemory2KHR, PFN_vkBindImageMemory2KHR, "vkBindImageMemory2KHR");
    }
#endif // #if VMA_BIND_MEMORY2

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget && m_VulkanApiVersion < VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_FETCH_INSTANCE_FUNC(vkGetPhysicalDeviceMemoryProperties2KHR, PFN_vkGetPhysicalDeviceMemoryProperties2KHR, "vkGetPhysicalDeviceMemoryProperties2KHR");
    }
#endif // #if VMA_MEMORY_BUDGET

#undef VMA_FETCH_DEVICE_FUNC
#undef VMA_FETCH_INSTANCE_FUNC
}
void VmaAllocator_T::ValidateVulkanFunctions()
{
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
    VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrDedicatedAllocation)
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
    }
#endif

#if VMA_BIND_MEMORY2 || VMA_VULKAN_VERSION >= 1001000
    if(m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0) || m_UseKhrBindMemory2)
    {
        VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
        VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
    }
#endif

#if VMA_MEMORY_BUDGET || VMA_VULKAN_VERSION >= 1001000
    if(m_UseExtMemoryBudget || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties2KHR != VMA_NULL);
    }
#endif
}
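/*
Editor's usage sketch: with VMA_STATIC_VULKAN_FUNCTIONS defined to 0, the
pointers validated above must be supplied by the application (or fetched by
ImportVulkanFunctions_Dynamic). A hand-filled struct might begin like this:

    VmaVulkanFunctions vkFuncs = {};
    vkFuncs.vkGetPhysicalDeviceProperties = vkGetPhysicalDeviceProperties;
    vkFuncs.vkGetPhysicalDeviceMemoryProperties = vkGetPhysicalDeviceMemoryProperties;
    vkFuncs.vkAllocateMemory = vkAllocateMemory;
    // ... one member per entry point used by VMA ...

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pVulkanFunctions = &vkFuncs;
*/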
VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
    const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
    const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
    return VmaAlignUp(isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize, (VkDeviceSize)32);
}
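/*
Editor's worked example for the heuristic above (numbers illustrative): with
VMA_SMALL_HEAP_MAX_SIZE at its default of 1 GiB, a 256 MiB heap counts as
"small" and gets 256 MiB / 8 = 32 MiB blocks, while larger heaps use
m_PreferredLargeHeapBlockSize - VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 MiB)
unless overridden via VmaAllocatorCreateInfo::preferredLargeHeapBlockSize.
*/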
VkResult VmaAllocator_T::AllocateMemoryOfType(
    VkDeviceSize size,
    VkDeviceSize alignment,
    bool dedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    uint32_t memTypeIndex,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations != VMA_NULL);
    VMA_DEBUG_LOG("  AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);

    VmaAllocationCreateInfo finalCreateInfo = createInfo;

    // If memory type is not HOST_VISIBLE, disable MAPPED.
    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
    {
        finalCreateInfo.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
    }

    VmaBlockVector* const blockVector = m_pBlockVectors[memTypeIndex];
    VMA_ASSERT(blockVector);

    const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
    bool preferDedicatedMemory =
        VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
        dedicatedAllocation ||
        // Heuristic: allocate dedicated memory if the requested size is greater than half of the preferred block size.
        size > preferredBlockSize / 2;

    if(preferDedicatedMemory &&
        (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) == 0 &&
        finalCreateInfo.pool == VK_NULL_HANDLE)
    {
        finalCreateInfo.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
    }

    if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0)
    {
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        return AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
            allocationCount, pAllocations);
    }
    else
    {
        VkResult res = blockVector->Allocate(
            m_CurrentFrameIndex.load(),
            size, alignment,
            finalCreateInfo, suballocType,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            return res;
        }

        // Block vector allocation failed: fall back to dedicated memory.
        if((finalCreateInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        res = AllocateDedicatedMemory(
            size, suballocType, memTypeIndex,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0,
            (finalCreateInfo.flags & VMA_ALLOCATION_CREATE_USER_DATA_COPY_STRING_BIT) != 0,
            finalCreateInfo.pUserData,
            dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
            allocationCount, pAllocations);
        if(res == VK_SUCCESS)
        {
            VMA_DEBUG_LOG("    Allocated as DedicatedMemory");
            return VK_SUCCESS;
        }
        // Everything failed: return the error code.
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }
}
VkResult VmaAllocator_T::AllocateDedicatedMemory(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    bool withinBudget,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    VMA_ASSERT(allocationCount > 0 && pAllocations);

    if(withinBudget)
    {
        const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaBudget heapBudget = {};
        GetBudget(&heapBudget, heapIndex, 1);
        if(heapBudget.usage + size * allocationCount > heapBudget.budget)
        {
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }

    VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
    allocInfo.memoryTypeIndex = memTypeIndex;
    allocInfo.allocationSize = size;

#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
            dedicatedAllocInfo.buffer = dedicatedBuffer;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            dedicatedAllocInfo.image = dedicatedImage;
            VmaPnextChainPushFront(&allocInfo, &dedicatedAllocInfo);
        }
    }
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000

#if VMA_BUFFER_DEVICE_ADDRESS
    VkMemoryAllocateFlagsInfoKHR allocFlagsInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR };
    if(m_UseKhrBufferDeviceAddress)
    {
        bool canContainBufferWithDeviceAddress = true;
        if(dedicatedBuffer != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = dedicatedBufferUsage == UINT32_MAX || // Usage flags unknown
                (dedicatedBufferUsage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT) != 0;
        }
        else if(dedicatedImage != VK_NULL_HANDLE)
        {
            canContainBufferWithDeviceAddress = false;
        }
        if(canContainBufferWithDeviceAddress)
        {
            allocFlagsInfo.flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR;
            VmaPnextChainPushFront(&allocInfo, &allocFlagsInfo);
        }
    }
#endif // #if VMA_BUFFER_DEVICE_ADDRESS

    size_t allocIndex;
    VkResult res = VK_SUCCESS;
    for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
    {
        res = AllocateDedicatedMemoryPage(
            size, suballocType, memTypeIndex,
            allocInfo, map, isUserDataString, pUserData,
            pAllocations + allocIndex);
        if(res != VK_SUCCESS)
        {
            break;
        }
    }

    if(res == VK_SUCCESS)
    {
        // Register the new allocations in m_pDedicatedAllocations.
        {
            VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
            AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
            VMA_ASSERT(pDedicatedAllocations);
            for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
            {
                VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
            }
        }

        VMA_DEBUG_LOG("    Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
    }
    else
    {
        // Free all already-created allocations.
        while(allocIndex--)
        {
            VmaAllocation currAlloc = pAllocations[allocIndex];
            VkDeviceMemory hMemory = currAlloc->GetMemory();
            // (unmapping of persistently mapped memory elided in this excerpt)
            FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), currAlloc->GetSize());
            currAlloc->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(currAlloc);
        }

        memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);
    }

    return res;
}
VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
    VkDeviceSize size,
    VmaSuballocationType suballocType,
    uint32_t memTypeIndex,
    const VkMemoryAllocateInfo& allocInfo,
    bool map,
    bool isUserDataString,
    void* pUserData,
    VmaAllocation* pAllocation)
{
    VkDeviceMemory hMemory = VK_NULL_HANDLE;
    VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
    if(res < 0)
    {
        VMA_DEBUG_LOG("    vkAllocateMemory FAILED");
        return res;
    }

    void* pMappedData = VMA_NULL;
    if(map)
    {
        res = (*m_VulkanFunctions.vkMapMemory)(
            m_hDevice,
            hMemory,
            0,
            VK_WHOLE_SIZE,
            0,
            &pMappedData);
        if(res < 0)
        {
            VMA_DEBUG_LOG("    vkMapMemory FAILED");
            FreeVulkanMemory(memTypeIndex, size, hMemory);
            return res;
        }
    }

    *pAllocation = m_AllocationObjectAllocator.Allocate(m_CurrentFrameIndex.load(), isUserDataString);
    (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
    (*pAllocation)->SetUserData(this, pUserData);
    m_Budget.AddAllocation(MemoryTypeIndexToHeapIndex(memTypeIndex), size);
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
    {
        FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::GetBufferMemoryRequirements(
    VkBuffer hBuffer,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.buffer = hBuffer;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
void VmaAllocator_T::GetImageMemoryRequirements(
    VkImage hImage,
    VkMemoryRequirements& memReq,
    bool& requiresDedicatedAllocation,
    bool& prefersDedicatedAllocation) const
{
#if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    if(m_UseKhrDedicatedAllocation || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0))
    {
        VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
        memReqInfo.image = hImage;

        VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };

        VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
        VmaPnextChainPushFront(&memReq2, &memDedicatedReq);

        (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);

        memReq = memReq2.memoryRequirements;
        requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
        prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
    }
    else
#endif // #if VMA_DEDICATED_ALLOCATION || VMA_VULKAN_VERSION >= 1001000
    {
        (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
        requiresDedicatedAllocation = false;
        prefersDedicatedAllocation = false;
    }
}
VkResult VmaAllocator_T::AllocateMemory(
    const VkMemoryRequirements& vkMemReq,
    bool requiresDedicatedAllocation,
    bool prefersDedicatedAllocation,
    VkBuffer dedicatedBuffer,
    VkBufferUsageFlags dedicatedBufferUsage,
    VkImage dedicatedImage,
    const VmaAllocationCreateInfo& createInfo,
    VmaSuballocationType suballocType,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    memset(pAllocations, 0, sizeof(VmaAllocation) * allocationCount);

    VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));

    if(vkMemReq.size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if((createInfo.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
        (createInfo.flags & VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT) != 0)
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }
    if(requiresDedicatedAllocation)
    {
        if((createInfo.flags & VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT) != 0)
        {
            VMA_ASSERT(0 && "VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
        if(createInfo.pool != VK_NULL_HANDLE)
        {
            VMA_ASSERT(0 && "Pool specified while dedicated allocation is required.");
            return VK_ERROR_OUT_OF_DEVICE_MEMORY;
        }
    }
    if((createInfo.pool != VK_NULL_HANDLE) &&
        ((createInfo.flags & VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT) != 0))
    {
        VMA_ASSERT(0 && "Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
        return VK_ERROR_OUT_OF_DEVICE_MEMORY;
    }

    if(createInfo.pool != VK_NULL_HANDLE)
    {
        const VkDeviceSize alignmentForPool = VMA_MAX(
            vkMemReq.alignment,
            GetMemoryTypeMinAlignment(createInfo.pool->m_BlockVector.GetMemoryTypeIndex()));

        VmaAllocationCreateInfo createInfoForPool = createInfo;
        // If memory type is not HOST_VISIBLE, disable MAPPED.
        if((createInfoForPool.flags & VMA_ALLOCATION_CREATE_MAPPED_BIT) != 0 &&
            (m_MemProps.memoryTypes[createInfo.pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            createInfoForPool.flags &= ~VMA_ALLOCATION_CREATE_MAPPED_BIT;
        }

        return createInfo.pool->m_BlockVector.Allocate(
            m_CurrentFrameIndex.load(),
            vkMemReq.size,
            alignmentForPool,
            createInfoForPool,
            suballocType,
            allocationCount,
            pAllocations);
    }
    else
    {
        // Bit mask of memory types acceptable for this allocation.
        uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
        uint32_t memTypeIndex = UINT32_MAX;
        VkResult res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
        if(res == VK_SUCCESS)
        {
            VkDeviceSize alignmentForMemType = VMA_MAX(
                vkMemReq.alignment,
                GetMemoryTypeMinAlignment(memTypeIndex));

            res = AllocateMemoryOfType(
                vkMemReq.size,
                alignmentForMemType,
                requiresDedicatedAllocation || prefersDedicatedAllocation,
                dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
                createInfo, memTypeIndex, suballocType,
                allocationCount, pAllocations);
            // Succeeded on first try.
            if(res == VK_SUCCESS)
            {
                return res;
            }
            // Allocation from this memory type failed. Try other compatible memory types.
            for(;;)
            {
                // Remove the old memTypeIndex from the list of possibilities.
                memoryTypeBits &= ~(1u << memTypeIndex);
                // Find an alternative memTypeIndex.
                res = vmaFindMemoryTypeIndex(this, memoryTypeBits, &createInfo, &memTypeIndex);
                if(res == VK_SUCCESS)
                {
                    alignmentForMemType = VMA_MAX(
                        vkMemReq.alignment,
                        GetMemoryTypeMinAlignment(memTypeIndex));

                    res = AllocateMemoryOfType(
                        vkMemReq.size,
                        alignmentForMemType,
                        requiresDedicatedAllocation || prefersDedicatedAllocation,
                        dedicatedBuffer, dedicatedBufferUsage, dedicatedImage,
                        createInfo, memTypeIndex, suballocType,
                        allocationCount, pAllocations);
                    // Allocation from this alternative memory type succeeded.
                    if(res == VK_SUCCESS)
                    {
                        return res;
                    }
                }
                // No other matching memory type index could be found.
                else
                {
                    // Not returning res, which is VK_ERROR_FEATURE_NOT_PRESENT, because we already failed to allocate once.
                    return VK_ERROR_OUT_OF_DEVICE_MEMORY;
                }
            }
        }
        else
        {
            return res;
        }
    }
}
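/*
Editor's usage sketch for the dedicated-vs-block decision above: an
application can force the dedicated path explicitly. All identifiers other
than the VMA/Vulkan ones are hypothetical:

    VkBufferCreateInfo bufInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufInfo.size = 64ull * 1024 * 1024;
    bufInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;

    VmaAllocationCreateInfo allocInfo = {};
    allocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
    allocInfo.flags = VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;

    VkBuffer buf; VmaAllocation alloc;
    vmaCreateBuffer(allocator, &bufInfo, &allocInfo, &buf, &alloc, VMA_NULL);
*/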
void VmaAllocator_T::FreeMemory(
    size_t allocationCount,
    const VmaAllocation* pAllocations)
{
    VMA_ASSERT(pAllocations);

    for(size_t allocIndex = allocationCount; allocIndex--; )
    {
        VmaAllocation allocation = pAllocations[allocIndex];

        if(allocation != VK_NULL_HANDLE)
        {
            if(TouchAllocation(allocation))
            {
                if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
                {
                    FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
                }

                switch(allocation->GetType())
                {
                case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
                    {
                        VmaBlockVector* pBlockVector = VMA_NULL;
                        VmaPool hPool = allocation->GetBlock()->GetParentPool();
                        if(hPool != VK_NULL_HANDLE)
                        {
                            pBlockVector = &hPool->m_BlockVector;
                        }
                        else
                        {
                            const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
                            pBlockVector = m_pBlockVectors[memTypeIndex];
                        }
                        pBlockVector->Free(allocation);
                    }
                    break;
                case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
                    FreeDedicatedMemory(allocation);
                    break;
                default:
                    VMA_ASSERT(0);
                }
            }

            // Do this regardless of whether the allocation is lost; lost allocations still count toward the budget.
            m_Budget.RemoveAllocation(MemoryTypeIndexToHeapIndex(allocation->GetMemoryTypeIndex()), allocation->GetSize());
            allocation->SetUserData(this, VMA_NULL);
            m_AllocationObjectAllocator.Free(allocation);
        }
    }
}
VkResult VmaAllocator_T::ResizeAllocation(
    const VmaAllocation alloc,
    VkDeviceSize newSize)
{
    // This function is deprecated and so it does nothing. It is kept for backward compatibility.
    if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if(newSize == alloc->GetSize())
    {
        return VK_SUCCESS;
    }
    return VK_ERROR_OUT_OF_POOL_MEMORY;
}
void VmaAllocator_T::CalculateStats(VmaStats* pStats)
{
    // Initialize.
    InitStatInfo(pStats->total);
    for(size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
        InitStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        InitStatInfo(pStats->memoryHeap[i]);

    // Process default pools.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaBlockVector* const pBlockVector = m_pBlockVectors[memTypeIndex];
        VMA_ASSERT(pBlockVector);
        pBlockVector->AddStats(pStats);
    }

    // Process custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        for(size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
        {
            m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
        }
    }

    // Process dedicated allocations.
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        for(size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
        {
            VmaStatInfo allocationStatInfo;
            (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
            VmaAddStatInfo(pStats->total, allocationStatInfo);
            VmaAddStatInfo(pStats->memoryType[memTypeIndex], allocationStatInfo);
            VmaAddStatInfo(pStats->memoryHeap[memHeapIndex], allocationStatInfo);
        }
    }

    // Postprocess.
    VmaPostprocessCalcStatInfo(pStats->total);
    for(size_t i = 0; i < GetMemoryTypeCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryType[i]);
    for(size_t i = 0; i < GetMemoryHeapCount(); ++i)
        VmaPostprocessCalcStatInfo(pStats->memoryHeap[i]);
}
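/*
Editor's usage sketch: this member function backs the public vmaCalculateStats():

    VmaStats stats;
    vmaCalculateStats(allocator, &stats);
    printf("Used: %llu B in %u allocations\n",
        (unsigned long long)stats.total.usedBytes,
        stats.total.allocationCount);
*/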
void VmaAllocator_T::GetBudget(VmaBudget* outBudget, uint32_t firstHeap, uint32_t heapCount)
{
#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        if(m_Budget.m_OperationsSinceBudgetFetch < 30)
        {
            VmaMutexLockRead lockRead(m_Budget.m_BudgetMutex, m_UseMutex);
            for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
            {
                const uint32_t heapIndex = firstHeap + i;

                outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
                outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

                if(m_Budget.m_VulkanUsage[heapIndex] + outBudget->blockBytes > m_Budget.m_BlockBytesAtBudgetFetch[heapIndex])
                {
                    outBudget->usage = m_Budget.m_VulkanUsage[heapIndex] +
                        outBudget->blockBytes - m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
                }
                else
                {
                    outBudget->usage = 0;
                }

                // Have to take MIN with heap size because explicit HeapSizeLimit is included in it.
                outBudget->budget = VMA_MIN(
                    m_Budget.m_VulkanBudget[heapIndex], m_MemProps.memoryHeaps[heapIndex].size);
            }
        }
        else
        {
            UpdateVulkanBudget(); // Outside of mutex lock
            GetBudget(outBudget, firstHeap, heapCount); // Recursion
        }
    }
    else
#endif
    {
        for(uint32_t i = 0; i < heapCount; ++i, ++outBudget)
        {
            const uint32_t heapIndex = firstHeap + i;

            outBudget->blockBytes = m_Budget.m_BlockBytes[heapIndex];
            outBudget->allocationBytes = m_Budget.m_AllocationBytes[heapIndex];

            outBudget->usage = outBudget->blockBytes;
            outBudget->budget = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
        }
    }
}
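/*
Editor's usage sketch: per-heap budgets are queried through the public
vmaGetBudget(), which fills one VmaBudget per memory heap:

    VmaBudget budgets[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budgets);
    // Comparing budgets[heapIndex].usage against budgets[heapIndex].budget
    // tells how close the process is to the OS-imposed limit on that heap.
*/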
static const uint32_t VMA_VENDOR_ID_AMD = 4098;
VkResult VmaAllocator_T::DefragmentationBegin(
    const VmaDefragmentationInfo2& info,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    if(info.pAllocationsChanged != VMA_NULL)
    {
        memset(info.pAllocationsChanged, 0, info.allocationCount * sizeof(VkBool32));
    }

    *pContext = vma_new(this, VmaDefragmentationContext_T)(
        this, m_CurrentFrameIndex.load(), info.flags, pStats);

    (*pContext)->AddPools(info.poolCount, info.pPools);
    (*pContext)->AddAllocations(
        info.allocationCount, info.pAllocations, info.pAllocationsChanged);

    VkResult res = (*pContext)->Defragment(
        info.maxCpuBytesToMove, info.maxCpuAllocationsToMove,
        info.maxGpuBytesToMove, info.maxGpuAllocationsToMove,
        info.commandBuffer, pStats, info.flags);

    if(res != VK_NOT_READY)
    {
        vma_delete(this, *pContext);
        *pContext = VMA_NULL;
    }

    return res;
}

VkResult VmaAllocator_T::DefragmentationEnd(
    VmaDefragmentationContext context)
{
    vma_delete(this, context);
    return VK_SUCCESS;
}

VkResult VmaAllocator_T::DefragmentationPassBegin(
    VmaDefragmentationPassInfo* pInfo,
    VmaDefragmentationContext context)
{
    return context->DefragmentPassBegin(pInfo);
}

VkResult VmaAllocator_T::DefragmentationPassEnd(
    VmaDefragmentationContext context)
{
    return context->DefragmentPassEnd();
}
void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo)
{
    if(hAllocation->CanBecomeLost())
    {
        // Carefully designed lock-free algorithm: keep trying to bump the
        // last-use frame index to the current frame until it sticks.
        const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                pAllocationInfo->memoryType = UINT32_MAX;
                pAllocationInfo->deviceMemory = VK_NULL_HANDLE;
                pAllocationInfo->offset = 0;
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
                pAllocationInfo->deviceMemory = hAllocation->GetMemory();
                pAllocationInfo->offset = hAllocation->GetOffset();
                pAllocationInfo->size = hAllocation->GetSize();
                pAllocationInfo->pMappedData = VMA_NULL;
                pAllocationInfo->pUserData = hAllocation->GetUserData();
                return;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        pAllocationInfo->memoryType = hAllocation->GetMemoryTypeIndex();
        pAllocationInfo->deviceMemory = hAllocation->GetMemory();
        pAllocationInfo->offset = hAllocation->GetOffset();
        pAllocationInfo->size = hAllocation->GetSize();
        pAllocationInfo->pMappedData = hAllocation->GetMappedData();
        pAllocationInfo->pUserData = hAllocation->GetUserData();
    }
}
bool VmaAllocator_T::TouchAllocation(VmaAllocation hAllocation)
{
    // This is a stripped-down version of VmaAllocator_T::GetAllocationInfo.
    if(hAllocation->CanBecomeLost())
    {
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
            {
                return false;
            }
            else if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                return true;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
    }
    else
    {
#if VMA_STATS_STRING_ENABLED
        uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
        uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
        for(;;)
        {
            VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
            if(localLastUseFrameIndex == localCurrFrameIndex)
            {
                break;
            }
            else // Last use time earlier than current time.
            {
                if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
                {
                    localLastUseFrameIndex = localCurrFrameIndex;
                }
            }
        }
#endif

        return true;
    }
}
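/*
Editor's usage sketch for the lost-allocation protocol above: allocations
created with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT must be touched in
every frame they are used; vmaTouchAllocation() is the public entry point.
`frameIndex` and `alloc` are hypothetical application state:

    vmaSetCurrentFrameIndex(allocator, frameIndex);
    if(vmaTouchAllocation(allocator, alloc) == VK_FALSE)
    {
        // The allocation was lost: recreate the resource.
    }
*/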
VkResult VmaAllocator_T::CreatePool(const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
{
    VMA_DEBUG_LOG("  CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->memoryTypeIndex, pCreateInfo->flags);

    VmaPoolCreateInfo newCreateInfo = *pCreateInfo;

    if(newCreateInfo.maxBlockCount == 0)
    {
        newCreateInfo.maxBlockCount = SIZE_MAX;
    }
    if(newCreateInfo.minBlockCount > newCreateInfo.maxBlockCount)
    {
        return VK_ERROR_INITIALIZATION_FAILED;
    }
    // Memory type index out of range or forbidden.
    if(pCreateInfo->memoryTypeIndex >= GetMemoryTypeCount() ||
        ((1u << pCreateInfo->memoryTypeIndex) & m_GlobalMemoryTypeBits) == 0)
    {
        return VK_ERROR_FEATURE_NOT_PRESENT;
    }

    const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.memoryTypeIndex);

    *pPool = vma_new(this, VmaPool_T)(this, newCreateInfo, preferredBlockSize);

    VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
    if(res != VK_SUCCESS)
    {
        vma_delete(this, *pPool);
        *pPool = VMA_NULL;
        return res;
    }

    // Add to m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        (*pPool)->SetId(m_NextPoolId++);
        VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
    }

    return VK_SUCCESS;
}
void VmaAllocator_T::DestroyPool(VmaPool pool)
{
    // Remove from m_Pools.
    {
        VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
        VMA_ASSERT(success && "Pool not found in Allocator.");
    }

    vma_delete(this, pool);
}

void VmaAllocator_T::GetPoolStats(VmaPool pool, VmaPoolStats* pPoolStats)
{
    pool->m_BlockVector.GetPoolStats(pPoolStats);
}
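/*
Editor's usage sketch: the pool lifecycle implemented above, seen from the
public API. The block size and memTypeIndex values are illustrative:

    VmaPoolCreateInfo poolInfo = {};
    poolInfo.memoryTypeIndex = memTypeIndex; // e.g. from vmaFindMemoryTypeIndex
    poolInfo.blockSize = 16ull * 1024 * 1024;
    poolInfo.minBlockCount = 1;

    VmaPool pool;
    vmaCreatePool(allocator, &poolInfo, &pool);
    // ... allocate with VmaAllocationCreateInfo::pool = pool ...
    vmaDestroyPool(allocator, pool); // only after all its allocations are freed
*/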
void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
{
    m_CurrentFrameIndex.store(frameIndex);

#if VMA_MEMORY_BUDGET
    if(m_UseExtMemoryBudget)
    {
        UpdateVulkanBudget();
    }
#endif // #if VMA_MEMORY_BUDGET
}
void VmaAllocator_T::MakePoolAllocationsLost(
    VmaPool hPool,
    size_t* pLostAllocationCount)
{
    hPool->m_BlockVector.MakePoolAllocationsLost(
        m_CurrentFrameIndex.load(),
        pLostAllocationCount);
}

VkResult VmaAllocator_T::CheckPoolCorruption(VmaPool hPool)
{
    return hPool->m_BlockVector.CheckCorruption();
}
16487 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
16489 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
16492 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
16494 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
16496 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
16497 VMA_ASSERT(pBlockVector);
16498 VkResult localRes = pBlockVector->CheckCorruption();
16501 case VK_ERROR_FEATURE_NOT_PRESENT:
16504 finalRes = VK_SUCCESS;
16514 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
16515 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
16517 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
16519 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
16522 case VK_ERROR_FEATURE_NOT_PRESENT:
16525 finalRes = VK_SUCCESS;
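/*
Caller-side sketch (illustrative): corruption checking only reports anything
useful when the implementation is compiled with margins and corruption
detection enabled, e.g.:

    #define VMA_DEBUG_MARGIN 16
    #define VMA_DEBUG_DETECT_CORRUPTION 1
    #define VMA_IMPLEMENTATION
    #include "vk_mem_alloc.h"

    // Later, e.g. once per frame in a debug build:
    VkResult res = vmaCheckCorruption(allocator, UINT32_MAX); // UINT32_MAX = all memory types.
    // VK_ERROR_FEATURE_NOT_PRESENT means no checked memory type could be validated.
*/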
void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
{
    *pAllocation = m_AllocationObjectAllocator.Allocate(VMA_FRAME_INDEX_LOST, false);
    (*pAllocation)->InitLost();
}
VkResult VmaAllocator_T::AllocateVulkanMemory(const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
{
    const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);

    // HeapSizeLimit is in effect for this heap.
    if((m_HeapSizeLimitMask & (1u << heapIndex)) != 0)
    {
        const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
        VkDeviceSize blockBytes = m_Budget.m_BlockBytes[heapIndex];
        for(;;)
        {
            const VkDeviceSize blockBytesAfterAllocation = blockBytes + pAllocateInfo->allocationSize;
            if(blockBytesAfterAllocation > heapSize)
            {
                return VK_ERROR_OUT_OF_DEVICE_MEMORY;
            }
            if(m_Budget.m_BlockBytes[heapIndex].compare_exchange_strong(blockBytes, blockBytesAfterAllocation))
            {
                break;
            }
        }
    }
    else
    {
        m_Budget.m_BlockBytes[heapIndex] += pAllocateInfo->allocationSize;
    }

    // VULKAN CALL vkAllocateMemory.
    VkResult res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);

    if(res == VK_SUCCESS)
    {
#if VMA_MEMORY_BUDGET
        ++m_Budget.m_OperationsSinceBudgetFetch;
#endif

        // Informative callback.
        if(m_DeviceMemoryCallbacks.pfnAllocate != VMA_NULL)
        {
            (*m_DeviceMemoryCallbacks.pfnAllocate)(this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize, m_DeviceMemoryCallbacks.pUserData);
        }
    }
    else
    {
        // Allocation failed - roll back the budget bookkeeping.
        m_Budget.m_BlockBytes[heapIndex] -= pAllocateInfo->allocationSize;
    }

    return res;
}
void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
{
    // Informative callback.
    if(m_DeviceMemoryCallbacks.pfnFree != VMA_NULL)
    {
        (*m_DeviceMemoryCallbacks.pfnFree)(this, memoryType, hMemory, size, m_DeviceMemoryCallbacks.pUserData);
    }

    // VULKAN CALL vkFreeMemory.
    (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());

    m_Budget.m_BlockBytes[MemoryTypeIndexToHeapIndex(memoryType)] -= size;
}
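/*
Caller-side sketch (illustrative): the m_HeapSizeLimitMask branch above is only
taken for heaps limited via VmaAllocatorCreateInfo::pHeapSizeLimit, e.g. to
simulate a smaller GPU. The capped heap index (0 here) is an assumption.

    VkDeviceSize heapSizeLimit[VK_MAX_MEMORY_HEAPS];
    for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
        heapSizeLimit[i] = VK_WHOLE_SIZE; // Unlimited by default.
    heapSizeLimit[0] = 256ull * 1024 * 1024; // Cap heap 0 at 256 MiB.

    VmaAllocatorCreateInfo allocatorInfo = {};
    allocatorInfo.pHeapSizeLimit = heapSizeLimit;
    // ... fill the remaining members, then call vmaCreateAllocator.
*/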
VkResult VmaAllocator_T::BindVulkanBuffer(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkBuffer buffer,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
        {
            VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
            bindBufferMemoryInfo.pNext = pNext;
            bindBufferMemoryInfo.buffer = buffer;
            bindBufferMemoryInfo.memory = memory;
            bindBufferMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::BindVulkanImage(
    VkDeviceMemory memory,
    VkDeviceSize memoryOffset,
    VkImage image,
    const void* pNext)
{
    if(pNext != VMA_NULL)
    {
#if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        if((m_UseKhrBindMemory2 || m_VulkanApiVersion >= VK_MAKE_VERSION(1, 1, 0)) &&
            m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
        {
            VkBindImageMemoryInfoKHR bindImageMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
            bindImageMemoryInfo.pNext = pNext;
            bindImageMemoryInfo.image = image;
            bindImageMemoryInfo.memory = memory;
            bindImageMemoryInfo.memoryOffset = memoryOffset;
            return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindImageMemoryInfo);
        }
        else
#endif // #if VMA_VULKAN_VERSION >= 1001000 || VMA_BIND_MEMORY2
        {
            return VK_ERROR_EXTENSION_NOT_PRESENT;
        }
    }
    else
    {
        return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
    }
}
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        char* pBytes = VMA_NULL;
        VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
        if(res == VK_SUCCESS)
        {
            *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
            hAllocation->BlockAllocMap();
        }
        return res;
    }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}

void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
{
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        hAllocation->BlockAllocUnmap();
        pBlock->Unmap(this, 1);
        break;
    }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        hAllocation->DedicatedAllocUnmap(this);
        break;
    default:
        VMA_ASSERT(0);
    }
}
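/*
Caller-side sketch (illustrative): the public wrappers of Map/Unmap above are
used to write to host-visible memory. Flushing is a no-op on HOST_COHERENT
memory types, so it is safe to call unconditionally. `srcData`/`srcSize` are
assumed caller-side variables.

    void* mapped = VMA_NULL;
    if(vmaMapMemory(allocator, alloc, &mapped) == VK_SUCCESS)
    {
        memcpy(mapped, srcData, (size_t)srcSize);
        vmaFlushAllocation(allocator, alloc, 0, VK_WHOLE_SIZE);
        vmaUnmapMemory(allocator, alloc);
    }
*/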
VkResult VmaAllocator_T::BindBufferMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer hBuffer,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindBufferMemory(this, hAllocation, allocationLocalOffset, hBuffer, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}

VkResult VmaAllocator_T::BindImageMemory(
    VmaAllocation hAllocation,
    VkDeviceSize allocationLocalOffset,
    VkImage hImage,
    const void* pNext)
{
    VkResult res = VK_SUCCESS;
    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
        break;
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
    {
        VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
        VMA_ASSERT(pBlock && "Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
        res = pBlock->BindImageMemory(this, hAllocation, allocationLocalOffset, hImage, pNext);
        break;
    }
    default:
        VMA_ASSERT(0);
    }
    return res;
}
void VmaAllocator_T::FlushOrInvalidateAllocation(
    VmaAllocation hAllocation,
    VkDeviceSize offset, VkDeviceSize size,
    VMA_CACHE_OPERATION op)
{
    const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
    if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
    {
        const VkDeviceSize allocationSize = hAllocation->GetSize();
        VMA_ASSERT(offset <= allocationSize);

        const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;

        VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
        memRange.memory = hAllocation->GetMemory();

        switch(hAllocation->GetType())
        {
        case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                memRange.size = allocationSize - memRange.offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
                memRange.size = VMA_MIN(
                    VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
                    allocationSize - memRange.offset);
            }
            break;

        case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            // 1. Align the range while staying within this allocation.
            memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
            if(size == VK_WHOLE_SIZE)
            {
                size = allocationSize - offset;
            }
            else
            {
                VMA_ASSERT(offset + size <= allocationSize);
            }
            memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);

            // 2. Adjust to the whole block and clamp to its end.
            const VkDeviceSize allocationOffset = hAllocation->GetOffset();
            VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
            const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
            memRange.offset += allocationOffset;
            memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
            break;
        }

        default:
            VMA_ASSERT(0);
        }

        switch(op)
        {
        case VMA_CACHE_FLUSH:
            (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        case VMA_CACHE_INVALIDATE:
            (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
            break;
        default:
            VMA_ASSERT(0);
        }
    }
    // else: Just ignore this call.
}
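/*
Worked example of the alignment above (all numbers assumed): with
nonCoherentAtomSize = 64, flushing offset = 100, size = 120 inside a 256-byte
dedicated allocation yields:

    memRange.offset = VmaAlignDown(100, 64) = 64
    VmaAlignUp(120 + (100 - 64), 64) = VmaAlignUp(156, 64) = 192
    memRange.size = VMA_MIN(192, 256 - 64) = 192

    // The flushed range [64, 256) covers the requested [100, 220) and is
    // aligned to the atom size, as vkFlushMappedMemoryRanges requires.
*/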
void VmaAllocator_T::FreeDedicatedMemory(const VmaAllocation allocation)
{
    VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);

    const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
    {
        VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocations);
        bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
        VMA_ASSERT(success);
    }

    VkDeviceMemory hMemory = allocation->GetMemory();

    // No explicit unmap is needed here: vkFreeMemory implicitly unmaps
    // persistently mapped memory.
    FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);

    VMA_DEBUG_LOG("    Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
}
uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits() const
{
    VkBufferCreateInfo dummyBufCreateInfo;
    VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);

    uint32_t memoryTypeBits = 0;

    // Create a dummy buffer only to query which memory types can back it.
    VkBuffer buf = VK_NULL_HANDLE;
    VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
        m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq;
        (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
        memoryTypeBits = memReq.memoryTypeBits;

        (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
    }

    return memoryTypeBits;
}
uint32_t VmaAllocator_T::CalculateGlobalMemoryTypeBits() const
{
    // Make sure memory information is already fetched.
    VMA_ASSERT(GetMemoryTypeCount() > 0);

    uint32_t memoryTypeBits = UINT32_MAX;

    if(!m_UseAmdDeviceCoherentMemory)
    {
        // Exclude memory types that have VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD.
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
            {
                memoryTypeBits &= ~(1u << memTypeIndex);
            }
        }
    }

    return memoryTypeBits;
}
#if VMA_MEMORY_BUDGET

void VmaAllocator_T::UpdateVulkanBudget()
{
    VMA_ASSERT(m_UseExtMemoryBudget);

    VkPhysicalDeviceMemoryProperties2KHR memProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR };

    VkPhysicalDeviceMemoryBudgetPropertiesEXT budgetProps = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT };
    VmaPnextChainPushFront(&memProps, &budgetProps);

    GetVulkanFunctions().vkGetPhysicalDeviceMemoryProperties2KHR(m_PhysicalDevice, &memProps);

    {
        VmaMutexLockWrite lockWrite(m_Budget.m_BudgetMutex, m_UseMutex);

        for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
        {
            m_Budget.m_VulkanUsage[heapIndex] = budgetProps.heapUsage[heapIndex];
            m_Budget.m_VulkanBudget[heapIndex] = budgetProps.heapBudget[heapIndex];
            m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] = m_Budget.m_BlockBytes[heapIndex].load();

            // Some buggy drivers return values that are zero or larger than the heap size.
            if(m_Budget.m_VulkanBudget[heapIndex] == 0)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size * 8 / 10; // 80% heuristic.
            }
            else if(m_Budget.m_VulkanBudget[heapIndex] > m_MemProps.memoryHeaps[heapIndex].size)
            {
                m_Budget.m_VulkanBudget[heapIndex] = m_MemProps.memoryHeaps[heapIndex].size;
            }
            if(m_Budget.m_VulkanUsage[heapIndex] == 0 && m_Budget.m_BlockBytesAtBudgetFetch[heapIndex] > 0)
            {
                m_Budget.m_VulkanUsage[heapIndex] = m_Budget.m_BlockBytesAtBudgetFetch[heapIndex];
            }
        }
        m_Budget.m_OperationsSinceBudgetFetch = 0;
    }
}

#endif // #if VMA_MEMORY_BUDGET
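/*
Caller-side sketch (illustrative): the budget refreshed above is what
vmaGetBudget returns. A typical per-frame pattern; `heapIndex` is assumed.

    vmaSetCurrentFrameIndex(allocator, frameIndex); // Periodically re-fetches the budget.
    VmaBudget budget[VK_MAX_MEMORY_HEAPS];
    vmaGetBudget(allocator, budget);
    if(budget[heapIndex].usage >= budget[heapIndex].budget)
    {
        // Over budget - avoid new allocations, consider freeing resources.
    }
*/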
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    // Calculated lazily and cached in an atomic.
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
#if VMA_STATS_STRING_ENABLED

void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
{
    bool dedicatedAllocationsStarted = false;
    for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
    {
        VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
        AllocationVectorType* const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
        VMA_ASSERT(pDedicatedAllocVector);
        if(pDedicatedAllocVector->empty() == false)
        {
            if(dedicatedAllocationsStarted == false)
            {
                dedicatedAllocationsStarted = true;
                json.WriteString("DedicatedAllocations");
                json.BeginObject();
            }

            json.BeginString("Type ");
            json.ContinueString(memTypeIndex);
            json.EndString();

            json.BeginArray();
            for(size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
            {
                json.BeginObject(true);
                const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
                hAlloc->PrintParameters(json);
                json.EndObject();
            }
            json.EndArray();
        }
    }
    if(dedicatedAllocationsStarted)
    {
        json.EndObject();
    }

    {
        bool allocationsStarted = false;
        for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
        {
            if(m_pBlockVectors[memTypeIndex]->IsEmpty() == false)
            {
                if(allocationsStarted == false)
                {
                    allocationsStarted = true;
                    json.WriteString("DefaultPools");
                    json.BeginObject();
                }

                json.BeginString("Type ");
                json.ContinueString(memTypeIndex);
                json.EndString();

                m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
            }
        }
        if(allocationsStarted)
        {
            json.EndObject();
        }
    }

    // Custom pools.
    {
        VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
        const size_t poolCount = m_Pools.size();
        if(poolCount > 0)
        {
            json.WriteString("Pools");
            json.BeginObject();
            for(size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
            {
                json.BeginString();
                json.ContinueString(m_Pools[poolIndex]->GetId());
                json.EndString();

                m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
            }
            json.EndObject();
        }
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
//////////////////////////////////////////////////////////////////////////////
// Public interface

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateAllocator(
    const VmaAllocatorCreateInfo* pCreateInfo,
    VmaAllocator* pAllocator)
{
    VMA_ASSERT(pCreateInfo && pAllocator);
    VMA_DEBUG_LOG("vmaCreateAllocator");
    *pAllocator = vma_new(pCreateInfo->pAllocationCallbacks, VmaAllocator_T)(pCreateInfo);
    return (*pAllocator)->Init(pCreateInfo);
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyAllocator(VmaAllocator allocator)
{
    if(allocator != VK_NULL_HANDLE)
    {
        VMA_DEBUG_LOG("vmaDestroyAllocator");
        VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
        vma_delete(&allocationCallbacks, allocator);
    }
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocatorInfo(VmaAllocator allocator, VmaAllocatorInfo* pAllocatorInfo)
{
    VMA_ASSERT(allocator && pAllocatorInfo);
    pAllocatorInfo->instance = allocator->m_hInstance;
    pAllocatorInfo->physicalDevice = allocator->GetPhysicalDevice();
    pAllocatorInfo->device = allocator->m_hDevice;
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetPhysicalDeviceProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
    *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryProperties(
    VmaAllocator allocator,
    const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
{
    VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
    *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetMemoryTypeProperties(
    VmaAllocator allocator,
    uint32_t memoryTypeIndex,
    VkMemoryPropertyFlags* pFlags)
{
    VMA_ASSERT(allocator && pFlags);
    VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
    *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetCurrentFrameIndex(
    VmaAllocator allocator,
    uint32_t frameIndex)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->SetCurrentFrameIndex(frameIndex);
}

VMA_CALL_PRE void VMA_CALL_POST vmaCalculateStats(
    VmaAllocator allocator,
    VmaStats* pStats)
{
    VMA_ASSERT(allocator && pStats);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->CalculateStats(pStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetBudget(
    VmaAllocator allocator,
    VmaBudget* pBudget)
{
    VMA_ASSERT(allocator && pBudget);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK
    allocator->GetBudget(pBudget, 0, allocator->GetMemoryHeapCount());
}
#if VMA_STATS_STRING_ENABLED

VMA_CALL_PRE void VMA_CALL_POST vmaBuildStatsString(
    VmaAllocator allocator,
    char** ppStatsString,
    VkBool32 detailedMap)
{
    VMA_ASSERT(allocator && ppStatsString);
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VmaStringBuilder sb(allocator);
    {
        VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
        json.BeginObject();

        VmaBudget budget[VK_MAX_MEMORY_HEAPS];
        allocator->GetBudget(budget, 0, allocator->GetMemoryHeapCount());

        VmaStats stats;
        allocator->CalculateStats(&stats);

        json.WriteString("Total");
        VmaPrintStatInfo(json, stats.total);

        for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
        {
            json.BeginString("Heap ");
            json.ContinueString(heapIndex);
            json.EndString();
            json.BeginObject();

            json.WriteString("Size");
            json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);

            json.WriteString("Flags");
            json.BeginArray(true);
            if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
            {
                json.WriteString("DEVICE_LOCAL");
            }
            json.EndArray();

            json.WriteString("Budget");
            json.BeginObject();
            json.WriteString("BlockBytes");
            json.WriteNumber(budget[heapIndex].blockBytes);
            json.WriteString("AllocationBytes");
            json.WriteNumber(budget[heapIndex].allocationBytes);
            json.WriteString("Usage");
            json.WriteNumber(budget[heapIndex].usage);
            json.WriteString("Budget");
            json.WriteNumber(budget[heapIndex].budget);
            json.EndObject();

            if(stats.memoryHeap[heapIndex].blockCount > 0)
            {
                json.WriteString("Stats");
                VmaPrintStatInfo(json, stats.memoryHeap[heapIndex]);
            }

            for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
            {
                if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
                {
                    json.BeginString("Type ");
                    json.ContinueString(typeIndex);
                    json.EndString();
                    json.BeginObject();

                    json.WriteString("Flags");
                    json.BeginArray(true);
                    VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
                    {
                        json.WriteString("DEVICE_LOCAL");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
                    {
                        json.WriteString("HOST_VISIBLE");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
                    {
                        json.WriteString("HOST_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
                    {
                        json.WriteString("HOST_CACHED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
                    {
                        json.WriteString("LAZILY_ALLOCATED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_PROTECTED_BIT) != 0)
                    {
                        json.WriteString("PROTECTED");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_COHERENT");
                    }
                    if((flags & VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY) != 0)
                    {
                        json.WriteString("DEVICE_UNCACHED");
                    }
                    json.EndArray();

                    if(stats.memoryType[typeIndex].blockCount > 0)
                    {
                        json.WriteString("Stats");
                        VmaPrintStatInfo(json, stats.memoryType[typeIndex]);
                    }

                    json.EndObject();
                }
            }

            json.EndObject();
        }
        if(detailedMap == VK_TRUE)
        {
            allocator->PrintDetailedMap(json);
        }

        json.EndObject();
    }

    const size_t len = sb.GetLength();
    char* const pChars = vma_new_array(allocator, char, len + 1);
    if(len > 0)
    {
        memcpy(pChars, sb.GetData(), len);
    }
    pChars[len] = '\0';
    *ppStatsString = pChars;
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeStatsString(
    VmaAllocator allocator,
    char* pStatsString)
{
    if(pStatsString != VMA_NULL)
    {
        VMA_ASSERT(allocator);
        size_t len = strlen(pStatsString);
        vma_delete_array(allocator, pStatsString, len + 1);
    }
}

#endif // #if VMA_STATS_STRING_ENABLED
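/*
Caller-side sketch (illustrative): dumping the JSON statistics to a file,
using <cstdio>. The file name is an assumption.

    char* statsString = VMA_NULL;
    vmaBuildStatsString(allocator, &statsString, VK_TRUE); // VK_TRUE = include detailed map.
    FILE* file = fopen("vma_stats.json", "w");
    if(file != NULL)
    {
        fputs(statsString, file);
        fclose(file);
    }
    vmaFreeStatsString(allocator, statsString);
*/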
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndex(
    VmaAllocator allocator,
    uint32_t memoryTypeBits,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    memoryTypeBits &= allocator->GetGlobalMemoryTypeBits();

    if(pAllocationCreateInfo->memoryTypeBits != 0)
    {
        memoryTypeBits &= pAllocationCreateInfo->memoryTypeBits;
    }

    uint32_t requiredFlags = pAllocationCreateInfo->requiredFlags;
    uint32_t preferredFlags = pAllocationCreateInfo->preferredFlags;
    uint32_t notPreferredFlags = 0;

    // Convert usage to required/preferred/not-preferred flags.
    switch(pAllocationCreateInfo->usage)
    {
    case VMA_MEMORY_USAGE_UNKNOWN:
        break;
    case VMA_MEMORY_USAGE_GPU_ONLY:
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_CPU_ONLY:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_TO_GPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
        {
            preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        }
        break;
    case VMA_MEMORY_USAGE_GPU_TO_CPU:
        requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
        preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
        break;
    case VMA_MEMORY_USAGE_CPU_COPY:
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
        break;
    case VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED:
        requiredFlags |= VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
        break;
    default:
        VMA_ASSERT(0);
        break;
    }

    // Avoid DEVICE_COHERENT unless the caller explicitly asked for it.
    if(((pAllocationCreateInfo->requiredFlags | pAllocationCreateInfo->preferredFlags) &
        (VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY | VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD_COPY)) == 0)
    {
        notPreferredFlags |= VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD_COPY;
    }

    *pMemoryTypeIndex = UINT32_MAX;
    uint32_t minCost = UINT32_MAX;
    for(uint32_t memTypeIndex = 0, memTypeBit = 1;
        memTypeIndex < allocator->GetMemoryTypeCount();
        ++memTypeIndex, memTypeBit <<= 1)
    {
        // This memory type is acceptable according to memoryTypeBits bitmask.
        if((memTypeBit & memoryTypeBits) != 0)
        {
            const VkMemoryPropertyFlags currFlags =
                allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
            // This memory type contains all requiredFlags.
            if((requiredFlags & ~currFlags) == 0)
            {
                // Cost = number of preferred bits missing + number of not-preferred bits present.
                uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags) +
                    VmaCountBitsSet(currFlags & notPreferredFlags);
                // Remember the memory type with the lowest cost.
                if(currCost < minCost)
                {
                    *pMemoryTypeIndex = memTypeIndex;
                    if(currCost == 0)
                    {
                        // Best possible result - stop searching.
                        return VK_SUCCESS;
                    }
                    minCost = currCost;
                }
            }
        }
    }
    return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
}
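/*
Worked example of the cost metric above (flags assumed): with
requiredFlags = HOST_VISIBLE and preferredFlags = HOST_CACHED, a memory type
offering HOST_VISIBLE | HOST_COHERENT passes the required test and gets cost
VmaCountBitsSet(HOST_CACHED & ~currFlags) = 1, while a type that also offers
HOST_CACHED gets cost 0 and is returned immediately. Caller-side usage:

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
    uint32_t memTypeIndex;
    VkResult res = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &allocCreateInfo, &memTypeIndex);
*/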
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForBufferInfo(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    // Create a temporary buffer only to query its memory requirements.
    const VkDevice hDev = allocator->m_hDevice;
    VkBuffer hBuffer = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
        hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
            hDev, hBuffer, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyBuffer(
            hDev, hBuffer, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaFindMemoryTypeIndexForImageInfo(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    uint32_t* pMemoryTypeIndex)
{
    VMA_ASSERT(allocator != VK_NULL_HANDLE);
    VMA_ASSERT(pImageCreateInfo != VMA_NULL);
    VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
    VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);

    // Create a temporary image only to query its memory requirements.
    const VkDevice hDev = allocator->m_hDevice;
    VkImage hImage = VK_NULL_HANDLE;
    VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
        hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
    if(res == VK_SUCCESS)
    {
        VkMemoryRequirements memReq = {};
        allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
            hDev, hImage, &memReq);

        res = vmaFindMemoryTypeIndex(
            allocator,
            memReq.memoryTypeBits,
            pAllocationCreateInfo,
            pMemoryTypeIndex);

        allocator->GetVulkanFunctions().vkDestroyImage(
            hDev, hImage, allocator->GetAllocationCallbacks());
    }
    return res;
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreatePool(
    VmaAllocator allocator,
    const VmaPoolCreateInfo* pCreateInfo,
    VmaPool* pPool)
{
    VMA_ASSERT(allocator && pCreateInfo && pPool);

    VMA_DEBUG_LOG("vmaCreatePool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->CreatePool(pCreateInfo, pPool);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaDestroyPool(
    VmaAllocator allocator,
    VmaPool pool)
{
    VMA_ASSERT(allocator);

    if(pool == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyPool");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->DestroyPool(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolStats(
    VmaAllocator allocator,
    VmaPool pool,
    VmaPoolStats* pPoolStats)
{
    VMA_ASSERT(allocator && pool && pPoolStats);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->GetPoolStats(pool, pPoolStats);
}

VMA_CALL_PRE void VMA_CALL_POST vmaMakePoolAllocationsLost(
    VmaAllocator allocator,
    VmaPool pool,
    size_t* pLostAllocationCount)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
    }
#endif

    allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VMA_DEBUG_LOG("vmaCheckPoolCorruption");

    return allocator->CheckPoolCorruption(pool);
}

VMA_CALL_PRE void VMA_CALL_POST vmaGetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char** ppName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaGetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *ppName = pool->GetName();
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetPoolName(
    VmaAllocator allocator,
    VmaPool pool,
    const char* pName)
{
    VMA_ASSERT(allocator && pool);

    VMA_DEBUG_LOG("vmaSetPoolName");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    pool->SetName(pName);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetPoolName(allocator->GetCurrentFrameIndex(), pool, pName);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemory(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemory(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryPages(
    VmaAllocator allocator,
    const VkMemoryRequirements* pVkMemoryRequirements,
    const VmaAllocationCreateInfo* pCreateInfo,
    size_t allocationCount,
    VmaAllocation* pAllocations,
    VmaAllocationInfo* pAllocationInfo)
{
    if(allocationCount == 0)
    {
        return VK_SUCCESS;
    }

    VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);

    VMA_DEBUG_LOG("vmaAllocateMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult result = allocator->AllocateMemory(
        *pVkMemoryRequirements,
        false, // requiresDedicatedAllocation
        false, // prefersDedicatedAllocation
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_UNKNOWN,
        allocationCount,
        pAllocations);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryPages(
            allocator->GetCurrentFrameIndex(),
            *pVkMemoryRequirements,
            *pCreateInfo,
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
    {
        for(size_t i = 0; i < allocationCount; ++i)
        {
            allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
        }
    }

    return result;
}
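/*
Caller-side sketch (illustrative): allocating raw memory from explicit
requirements, without a buffer or image. All values are assumptions.

    VkMemoryRequirements memReq = {};
    memReq.size = 1024 * 1024;
    memReq.alignment = 256;
    memReq.memoryTypeBits = UINT32_MAX;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VmaAllocation alloc;
    VmaAllocationInfo allocInfo;
    VkResult res = vmaAllocateMemory(allocator, &memReq, &allocCreateInfo, &alloc, &allocInfo);
    // ... use allocInfo.deviceMemory / allocInfo.offset, then:
    vmaFreeMemory(allocator, alloc);
*/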
VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        buffer, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        VK_NULL_HANDLE, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_BUFFER,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaAllocateMemoryForImage(
    VmaAllocator allocator,
    VkImage image,
    const VmaAllocationCreateInfo* pCreateInfo,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);

    VMA_DEBUG_LOG("vmaAllocateMemoryForImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkMemoryRequirements vkMemReq = {};
    bool requiresDedicatedAllocation = false;
    bool prefersDedicatedAllocation = false;
    allocator->GetImageMemoryRequirements(image, vkMemReq,
        requiresDedicatedAllocation, prefersDedicatedAllocation);

    VkResult result = allocator->AllocateMemory(
        vkMemReq,
        requiresDedicatedAllocation,
        prefersDedicatedAllocation,
        VK_NULL_HANDLE, // dedicatedBuffer
        UINT32_MAX, // dedicatedBufferUsage
        image, // dedicatedImage
        *pCreateInfo,
        VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
        1, // allocationCount
        pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordAllocateMemoryForImage(
            allocator->GetCurrentFrameIndex(),
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pCreateInfo,
            *pAllocation);
    }
#endif

    if(pAllocationInfo && result == VK_SUCCESS)
    {
        allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
    }

    return result;
}
VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaFreeMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->FreeMemory(
        1, // allocationCount
        &allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFreeMemoryPages(
    VmaAllocator allocator,
    size_t allocationCount,
    VmaAllocation* pAllocations)
{
    if(allocationCount == 0)
    {
        return;
    }

    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaFreeMemoryPages");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFreeMemoryPages(
            allocator->GetCurrentFrameIndex(),
            (uint64_t)allocationCount,
            pAllocations);
    }
#endif

    allocator->FreeMemory(allocationCount, pAllocations);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaResizeAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize newSize)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaResizeAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->ResizeAllocation(allocation, newSize);
}
VMA_CALL_PRE void VMA_CALL_POST vmaGetAllocationInfo(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && allocation && pAllocationInfo);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordGetAllocationInfo(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->GetAllocationInfo(allocation, pAllocationInfo);
}

VMA_CALL_PRE VkBool32 VMA_CALL_POST vmaTouchAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordTouchAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return allocator->TouchAllocation(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaSetAllocationUserData(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void* pUserData)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocation->SetUserData(allocator, pUserData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordSetAllocationUserData(
            allocator->GetCurrentFrameIndex(),
            allocation,
            pUserData);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaCreateLostAllocation(
    VmaAllocator allocator,
    VmaAllocation* pAllocation)
{
    VMA_ASSERT(allocator && pAllocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK;

    allocator->CreateLostAllocation(pAllocation);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordCreateLostAllocation(
            allocator->GetCurrentFrameIndex(),
            *pAllocation);
    }
#endif
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaMapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    void** ppData)
{
    VMA_ASSERT(allocator && allocation && ppData);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->Map(allocation, ppData);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordMapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    return res;
}

VMA_CALL_PRE void VMA_CALL_POST vmaUnmapMemory(
    VmaAllocator allocator,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordUnmapMemory(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    allocator->Unmap(allocation);
}

VMA_CALL_PRE void VMA_CALL_POST vmaFlushAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaFlushAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordFlushAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE void VMA_CALL_POST vmaInvalidateAllocation(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VMA_ASSERT(allocator && allocation);

    VMA_DEBUG_LOG("vmaInvalidateAllocation");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordInvalidateAllocation(
            allocator->GetCurrentFrameIndex(),
            allocation, offset, size);
    }
#endif
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaCheckCorruption");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->CheckCorruption(memoryTypeBits);
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragment(
    VmaAllocator allocator,
    VmaAllocation* pAllocations,
    size_t allocationCount,
    VkBool32* pAllocationsChanged,
    const VmaDefragmentationInfo* pDefragmentationInfo,
    VmaDefragmentationStats* pDefragmentationStats)
{
    // Deprecated interface, reimplemented using the begin/end API below.
    VmaDefragmentationInfo2 info2 = {};
    info2.allocationCount = (uint32_t)allocationCount;
    info2.pAllocations = pAllocations;
    info2.pAllocationsChanged = pAllocationsChanged;
    if(pDefragmentationInfo != VMA_NULL)
    {
        info2.maxCpuAllocationsToMove = pDefragmentationInfo->maxAllocationsToMove;
        info2.maxCpuBytesToMove = pDefragmentationInfo->maxBytesToMove;
    }
    else
    {
        info2.maxCpuAllocationsToMove = UINT32_MAX;
        info2.maxCpuBytesToMove = VK_WHOLE_SIZE;
    }
    // The GPU-related members of info2 are deliberately left zero.

    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &info2, pDefragmentationStats, &ctx);
    if(res == VK_NOT_READY)
    {
        res = vmaDefragmentationEnd(allocator, ctx);
    }
    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationBegin(
    VmaAllocator allocator,
    const VmaDefragmentationInfo2* pInfo,
    VmaDefragmentationStats* pStats,
    VmaDefragmentationContext* pContext)
{
    VMA_ASSERT(allocator && pInfo && pContext);

    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->allocationCount, pInfo->pAllocations));
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->poolCount, pInfo->pPools));

    VMA_DEBUG_LOG("vmaDefragmentationBegin");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDefragmentationBegin(
            allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
    }
#endif

    return res;
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaDefragmentationEnd(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaDefragmentationEnd");

    if(context != VK_NULL_HANDLE)
    {
        VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordDefragmentationEnd(
                allocator->GetCurrentFrameIndex(), context);
        }
#endif

        return allocator->DefragmentationEnd(context);
    }
    else
    {
        return VK_SUCCESS;
    }
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBeginDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context,
    VmaDefragmentationPassInfo* pInfo)
{
    VMA_ASSERT(allocator);
    VMA_ASSERT(pInfo);
    VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->moveCount, pInfo->pMoves));

    VMA_DEBUG_LOG("vmaBeginDefragmentationPass");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        pInfo->moveCount = 0;
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassBegin(pInfo, context);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaEndDefragmentationPass(
    VmaAllocator allocator,
    VmaDefragmentationContext context)
{
    VMA_ASSERT(allocator);

    VMA_DEBUG_LOG("vmaEndDefragmentationPass");
    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    if(context == VK_NULL_HANDLE)
    {
        return VK_SUCCESS;
    }

    return allocator->DefragmentationPassEnd(context);
}
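/*
Caller-side sketch (illustrative): the begin/end protocol for CPU-side
defragmentation. Error handling and the GPU path (commandBuffer,
maxGpuBytesToMove) are omitted; `allocs`/`allocCount` are assumed arrays.

    VmaDefragmentationInfo2 defragInfo = {};
    defragInfo.allocationCount = allocCount;
    defragInfo.pAllocations = allocs;
    defragInfo.maxCpuBytesToMove = VK_WHOLE_SIZE;
    defragInfo.maxCpuAllocationsToMove = UINT32_MAX;

    VmaDefragmentationStats stats = {};
    VmaDefragmentationContext ctx;
    VkResult res = vmaDefragmentationBegin(allocator, &defragInfo, &stats, &ctx);
    // res == VK_NOT_READY would mean more work remains for incremental passes.
    vmaDefragmentationEnd(allocator, ctx);
    // Buffers/images bound to moved allocations must then be recreated and rebound.
*/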
VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkBuffer buffer)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindBufferMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkBuffer buffer,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && buffer);

    VMA_DEBUG_LOG("vmaBindBufferMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkImage image)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
}

VMA_CALL_PRE VkResult VMA_CALL_POST vmaBindImageMemory2(
    VmaAllocator allocator,
    VmaAllocation allocation,
    VkDeviceSize allocationLocalOffset,
    VkImage image,
    const void* pNext)
{
    VMA_ASSERT(allocator && allocation && image);

    VMA_DEBUG_LOG("vmaBindImageMemory2");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
}
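/*
Caller-side sketch (illustrative): the *2 variants exist to pass a pNext chain
through to vkBindBufferMemory2/vkBindImageMemory2 and to bind at an extra
offset relative to the allocation's own offset. As BindVulkanBuffer above
shows, a non-null pNext requires VK_KHR_bind_memory2 or Vulkan >= 1.1,
otherwise VK_ERROR_EXTENSION_NOT_PRESENT is returned.

    // allocationLocalOffset = 0, no pNext chain:
    VkResult res = vmaBindBufferMemory2(allocator, alloc, 0, buf, VMA_NULL);
*/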
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateBuffer(
    VmaAllocator allocator,
    const VkBufferCreateInfo* pBufferCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkBuffer* pBuffer,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);

    if(pBufferCreateInfo->size == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }
    if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_COPY) != 0 &&
        !allocator->m_UseKhrBufferDeviceAddress)
    {
        VMA_ASSERT(0 && "Creating a buffer with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT is not valid if VMA_ALLOCATOR_CREATE_BUFFER_DEVICE_ADDRESS_BIT was not used.");
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pBuffer = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkBuffer.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
        allocator->m_hDevice,
        pBufferCreateInfo,
        allocator->GetAllocationCallbacks(),
        pBuffer);
    if(res >= 0)
    {
        // 2. vkGetBufferMemoryRequirements.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        // 3. Allocate memory using allocator.
        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            *pBuffer, // dedicatedBuffer
            pBufferCreateInfo->usage, // dedicatedBufferUsage
            VK_NULL_HANDLE, // dedicatedImage
            *pAllocationCreateInfo,
            VMA_SUBALLOCATION_TYPE_BUFFER,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateBuffer(
                allocator->GetCurrentFrameIndex(),
                *pBufferCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 4. Bind buffer with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            // Binding failed - undo the allocation and the buffer.
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
            *pBuffer = VK_NULL_HANDLE;
            return res;
        }
        // Allocation failed - undo the buffer.
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
        *pBuffer = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
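/*
Caller-side sketch (illustrative): the canonical vmaCreateBuffer usage that the
error-handling paths above protect. All values are assumptions.

    VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
    bufCreateInfo.size = 65536;
    bufCreateInfo.usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

    VmaAllocationCreateInfo allocCreateInfo = {};
    allocCreateInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;

    VkBuffer buf;
    VmaAllocation alloc;
    VkResult res = vmaCreateBuffer(allocator, &bufCreateInfo, &allocCreateInfo, &buf, &alloc, VMA_NULL);
    // ... use the buffer, then:
    vmaDestroyBuffer(allocator, buf, alloc); // Destroys the buffer and frees its memory.
*/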
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyBuffer(
    VmaAllocator allocator,
    VkBuffer buffer,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyBuffer");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyBuffer(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(buffer != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
    }

    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}
VMA_CALL_PRE VkResult VMA_CALL_POST vmaCreateImage(
    VmaAllocator allocator,
    const VkImageCreateInfo* pImageCreateInfo,
    const VmaAllocationCreateInfo* pAllocationCreateInfo,
    VkImage* pImage,
    VmaAllocation* pAllocation,
    VmaAllocationInfo* pAllocationInfo)
{
    VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);

    if(pImageCreateInfo->extent.width == 0 ||
        pImageCreateInfo->extent.height == 0 ||
        pImageCreateInfo->extent.depth == 0 ||
        pImageCreateInfo->mipLevels == 0 ||
        pImageCreateInfo->arrayLayers == 0)
    {
        return VK_ERROR_VALIDATION_FAILED_EXT;
    }

    VMA_DEBUG_LOG("vmaCreateImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

    *pImage = VK_NULL_HANDLE;
    *pAllocation = VK_NULL_HANDLE;

    // 1. Create VkImage.
    VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
        allocator->m_hDevice,
        pImageCreateInfo,
        allocator->GetAllocationCallbacks(),
        pImage);
    if(res >= 0)
    {
        VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
            VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
            VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;

        // 2. Allocate memory using allocator.
        VkMemoryRequirements vkMemReq = {};
        bool requiresDedicatedAllocation = false;
        bool prefersDedicatedAllocation = false;
        allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
            requiresDedicatedAllocation, prefersDedicatedAllocation);

        res = allocator->AllocateMemory(
            vkMemReq,
            requiresDedicatedAllocation,
            prefersDedicatedAllocation,
            VK_NULL_HANDLE, // dedicatedBuffer
            UINT32_MAX, // dedicatedBufferUsage
            *pImage, // dedicatedImage
            *pAllocationCreateInfo,
            suballocType,
            1, // allocationCount
            pAllocation);

#if VMA_RECORDING_ENABLED
        if(allocator->GetRecorder() != VMA_NULL)
        {
            allocator->GetRecorder()->RecordCreateImage(
                allocator->GetCurrentFrameIndex(),
                *pImageCreateInfo,
                *pAllocationCreateInfo,
                *pAllocation);
        }
#endif

        if(res >= 0)
        {
            // 3. Bind image with memory.
            if((pAllocationCreateInfo->flags & VMA_ALLOCATION_CREATE_DONT_BIND_BIT) == 0)
            {
                res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
            }
            if(res >= 0)
            {
                // All steps succeeded.
#if VMA_STATS_STRING_ENABLED
                (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
#endif
                if(pAllocationInfo != VMA_NULL)
                {
                    allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
                }
                return VK_SUCCESS;
            }
            // Binding failed - undo the allocation and the image.
            allocator->FreeMemory(
                1, // allocationCount
                pAllocation);
            *pAllocation = VK_NULL_HANDLE;
            (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
            *pImage = VK_NULL_HANDLE;
            return res;
        }
        // Allocation failed - undo the image.
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
        *pImage = VK_NULL_HANDLE;
        return res;
    }
    return res;
}
VMA_CALL_PRE void VMA_CALL_POST vmaDestroyImage(
    VmaAllocator allocator,
    VkImage image,
    VmaAllocation allocation)
{
    VMA_ASSERT(allocator);

    if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
    {
        return;
    }

    VMA_DEBUG_LOG("vmaDestroyImage");

    VMA_DEBUG_GLOBAL_MUTEX_LOCK

#if VMA_RECORDING_ENABLED
    if(allocator->GetRecorder() != VMA_NULL)
    {
        allocator->GetRecorder()->RecordDestroyImage(
            allocator->GetCurrentFrameIndex(),
            allocation);
    }
#endif

    if(image != VK_NULL_HANDLE)
    {
        (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
    }
    if(allocation != VK_NULL_HANDLE)
    {
        allocator->FreeMemory(
            1, // allocationCount
            &allocation);
    }
}

#endif // #ifdef VMA_IMPLEMENTATION